Organizes 11 projects for Cerbo GX/Venus OS into a single repository: - axiom-nmea: Raymarine LightHouse protocol decoder - dbus-generator-ramp: Generator current ramp controller - dbus-lightning: Blitzortung lightning monitor - dbus-meteoblue-forecast: Meteoblue weather forecast - dbus-no-foreign-land: noforeignland.com tracking - dbus-tides: Tide prediction from depth + harmonics - dbus-vrm-history: VRM cloud history proxy - dbus-windy-station: Windy.com weather upload - mfd-custom-app: MFD app deployment package - venus-html5-app: Custom Victron HTML5 app fork - watermaker: Watermaker PLC control UI Adds root README, .gitignore, project template, and per-project .gitignore files. Sensitive config files excluded via .gitignore with .example templates provided. Made-with: Cursor
562 lines
21 KiB
Python
Executable File
562 lines
21 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Dock Finder - Find SOG and COG fields while stationary at dock.
|
|
|
|
When at dock:
|
|
- SOG bounces between 0.0 and 0.2 kts (0 to ~0.1 m/s)
|
|
- COG jumps wildly between 0 and 359 degrees (0 to ~6.28 radians)
|
|
|
|
This script looks for paired fields that show these patterns:
|
|
1. Speed: small positive values near zero (0-0.1 m/s → 0-0.2 kts)
|
|
2. Angle: values spanning nearly the full 0-2π range (radians)
|
|
|
|
The script tracks variance over time to identify fluctuating fields.
|
|
|
|
Usage:
|
|
python dock_finder.py -i 198.18.5.5
|
|
python dock_finder.py -i 198.18.5.5 --samples 20 --interval 0.5
|
|
"""
|
|
|
|
import argparse
|
|
import math
|
|
import os
|
|
import signal
|
|
import socket
|
|
import struct
|
|
import sys
|
|
import time
|
|
from collections import defaultdict
|
|
from copy import copy
|
|
from dataclasses import dataclass
|
|
from datetime import datetime
|
|
from typing import Any, Dict, List, Optional, Set, Tuple
|
|
|
|
# Add parent directory to path for library import
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
|
|
from raymarine_nmea.protocol.parser import ProtobufParser, ProtoField
|
|
from raymarine_nmea.protocol.constants import (
|
|
WIRE_VARINT, WIRE_FIXED64, WIRE_LENGTH, WIRE_FIXED32,
|
|
HEADER_SIZE, RAD_TO_DEG, MS_TO_KTS,
|
|
)
|
|
from raymarine_nmea.sensors import MULTICAST_GROUPS
|
|
|
|
running = True
|
|
|
|
|
|
def signal_handler(signum, frame):
    """Flip the module-level run flag so the sampling loop exits cleanly."""
    global running
    running = False
|
|
|
|
|
|
@dataclass
class FieldStats:
    """Statistics for a field across multiple samples.

    Attributes:
        path: Dotted protobuf field path (e.g. "3.1" or "14[0].2").
        wire_type: Decoded wire-type label ('f32' or 'f64').
        values: Raw numeric samples collected so far (appended externally).
    """
    path: str
    wire_type: str
    values: List[float]

    @property
    def count(self) -> int:
        """Number of samples collected."""
        return len(self.values)

    @property
    def min_val(self) -> float:
        """Smallest sample, or 0 when no samples exist."""
        return min(self.values) if self.values else 0

    @property
    def max_val(self) -> float:
        """Largest sample, or 0 when no samples exist."""
        return max(self.values) if self.values else 0

    @property
    def range_val(self) -> float:
        """Spread between largest and smallest sample."""
        return self.max_val - self.min_val

    @property
    def mean(self) -> float:
        """Arithmetic mean, or 0 when no samples exist."""
        return sum(self.values) / len(self.values) if self.values else 0

    @property
    def variance(self) -> float:
        """Population variance; 0 with fewer than 2 samples."""
        if len(self.values) < 2:
            return 0
        mean = self.mean
        return sum((v - mean) ** 2 for v in self.values) / len(self.values)

    @property
    def std_dev(self) -> float:
        """Population standard deviation."""
        return math.sqrt(self.variance)

    def is_sog_candidate(self, max_speed_ms: float = 0.2,
                         min_range_ms: float = 0.001) -> bool:
        """Check if this could be SOG at dock (0-0.1 m/s, some variance).

        Args:
            max_speed_ms: Upper bound in m/s for a dock-speed reading.
                Default 0.2 m/s (~0.4 kts) matches the previous hard-coded
                behavior; callers may loosen it (e.g. for --sog-max).
            min_range_ms: Minimum spread (m/s) required to count as
                "fluctuating" rather than a frozen constant.
        """
        # Must be small positive values in m/s range
        if self.min_val < -0.01:  # Allow tiny negative noise
            return False
        if self.max_val > max_speed_ms:
            return False
        if self.max_val < 0.001:  # Must have some value
            return False
        # Should have some variance (dock bouncing)
        if self.range_val < min_range_ms:
            return False
        return True

    def is_cog_candidate(self, min_range_rad: float = math.pi / 2,
                         min_std_dev: float = 0.5) -> bool:
        """Check if this could be COG at dock (full circle jumps in radians).

        Args:
            min_range_rad: Minimum observed range in radians. Default is
                pi/2 (90 degrees), the previous hard-coded threshold;
                callers may tighten/loosen it (e.g. for --cog-range).
            min_std_dev: Minimum standard deviation in radians; GPS noise
                at dock produces a high spread.
        """
        # Must be in valid radian range (0 to 2π ≈ 6.28)
        if self.min_val < -0.1:
            return False
        if self.max_val > 7.0:  # Allow slightly over 2π
            return False
        # At dock, COG jumps wildly - expect a large range
        if self.range_val < min_range_rad:
            return False
        # Variance should be high
        if self.std_dev < min_std_dev:
            return False
        return True

    def as_degrees(self) -> Tuple[float, float, float]:
        """Return (min, max, mean) converted from radians to 0-360 degrees."""
        return (
            (self.min_val * RAD_TO_DEG) % 360,
            (self.max_val * RAD_TO_DEG) % 360,
            (self.mean * RAD_TO_DEG) % 360
        )

    def as_knots(self) -> Tuple[float, float, float]:
        """Return (min, max, mean) converted from m/s to knots."""
        return (
            self.min_val * MS_TO_KTS,
            self.max_val * MS_TO_KTS,
            self.mean * MS_TO_KTS
        )
|
|
|
|
|
|
def decode_float(raw: bytes) -> Optional[float]:
    """Decode 4 bytes as a little-endian IEEE-754 float.

    Returns None for wrong-length input or non-finite values: the previous
    `val == val` test only rejected NaN and let +/-inf through, which would
    corrupt the running min/max/variance statistics.
    """
    if len(raw) != 4:
        return None
    # Length check guarantees unpack succeeds ('<f' consumes exactly 4 bytes),
    # so no struct.error handling is needed here.
    val = struct.unpack('<f', raw)[0]
    return val if math.isfinite(val) else None
|
|
|
|
|
|
def decode_double(raw: bytes) -> Optional[float]:
    """Decode 8 bytes as a little-endian IEEE-754 double.

    Returns None for wrong-length input or non-finite values: the previous
    `val == val` test only rejected NaN and let +/-inf through, which would
    corrupt the running min/max/variance statistics.
    """
    if len(raw) != 8:
        return None
    # Length check guarantees unpack succeeds ('<d' consumes exactly 8 bytes),
    # so no struct.error handling is needed here.
    val = struct.unpack('<d', raw)[0]
    return val if math.isfinite(val) else None
|
|
|
|
|
|
def scan_fields(pf: ProtoField, path: str, results: Dict[str, Tuple[str, float]]):
    """Recursively walk a ProtoField tree, recording decodable numeric leaves.

    Fixed32/fixed64 payloads are decoded as float/double and stored in
    *results* keyed by dotted path. Varints are deliberately skipped: they
    are unlikely to carry SOG/COG readings.
    """
    wt = pf.wire_type
    decoder = None
    if wt == WIRE_FIXED32:
        decoder, label = decode_float, 'f32'
    elif wt == WIRE_FIXED64:
        decoder, label = decode_double, 'f64'

    if decoder is not None:
        decoded = decoder(pf.value)
        if decoded is not None:
            results[path] = (label, decoded)

    # Descend into any nested message fields, extending the dotted path.
    if pf.children:
        for num, sub in pf.children.items():
            scan_fields(sub, f"{path}.{num}", results)
|
|
|
|
|
|
def scan_packet(packet: bytes) -> Dict[str, Tuple[str, float]]:
    """Parse one UDP packet and return every decodable numeric field.

    Packets too short to hold a header plus a minimal protobuf payload
    yield an empty mapping. Field numbers 14/16/20 are collected as
    repeated entries and indexed in the path as "N[i]".
    """
    found: Dict[str, Tuple[str, float]] = {}
    if len(packet) < HEADER_SIZE + 10:
        return found

    parser = ProtobufParser(packet[HEADER_SIZE:])
    parsed = parser.parse_message(collect_repeated={14, 16, 20})

    for field_num, val in parsed.items():
        if isinstance(val, list):
            for idx, pf in enumerate(val):
                scan_fields(pf, f"{field_num}[{idx}]", found)
        else:
            scan_fields(val, f"{field_num}", found)

    return found
|
|
|
|
|
|
def find_parent_group(path: str) -> str:
    """Extract the top-level field group from a dotted path (e.g. '3.1' -> '3')."""
    # str.split never yields an empty list, so the first piece always exists;
    # maxsplit=1 avoids splitting the tail we discard anyway.
    return path.split('.', 1)[0]
|
|
|
|
|
|
def main():
    """Join the Raymarine multicast groups, sample packets for a while,
    and report which protobuf field paths look like SOG/COG at dock.

    Flow: parse args -> join multicast groups -> collect rate-limited
    samples (keyed by packet size) -> print diagnostics, candidate fields,
    paired SOG/COG groups, and optional verbose interpretation hints.
    """
    global running

    parser = argparse.ArgumentParser(
        description="Find SOG/COG fields while at dock",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Expected patterns at dock:
  SOG: ~0.0-0.2 kts (0-0.1 m/s) with small fluctuations
  COG: Wildly jumping 0-359° (0-6.28 rad) due to GPS noise at low speed

The script will identify fields matching these patterns and group them.
"""
    )
    parser.add_argument('-i', '--interface', required=True,
                        help='Interface IP for Raymarine multicast (e.g., 198.18.5.5)')
    parser.add_argument('-n', '--samples', type=int, default=30,
                        help='Number of samples to collect (default: 30)')
    parser.add_argument('--interval', type=float, default=0.5,
                        help='Seconds between samples (default: 0.5)')
    # NOTE(review): --sog-max and --cog-range are echoed in the banner below
    # but never fed into is_sog_candidate()/is_cog_candidate(), which use
    # their own hard-coded thresholds — confirm whether this is intended.
    parser.add_argument('--sog-max', type=float, default=0.2,
                        help='Max expected SOG in knots at dock (default: 0.2)')
    parser.add_argument('--cog-range', type=float, default=90,
                        help='Min expected COG range in degrees (default: 90)')
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='Show all fields, not just candidates')

    args = parser.parse_args()

    # Let Ctrl-C / SIGTERM flip the module-level `running` flag so the
    # sampling loop below exits cleanly.
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # Create sockets: one non-blocking UDP socket per multicast group,
    # joined on the interface given by --interface.
    sockets = []
    for group, port in MULTICAST_GROUPS:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # SO_REUSEPORT is not available on all platforms (e.g. older Windows).
            if hasattr(socket, 'SO_REUSEPORT'):
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            sock.bind(('', port))
            # 4s4s packs (group address, interface address) for membership.
            mreq = struct.pack("4s4s", socket.inet_aton(group), socket.inet_aton(args.interface))
            sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
            sock.setblocking(False)
            sockets.append((sock, group, port))
        except Exception as e:
            # Best-effort join: a single failed group should not abort the scan.
            print(f"Warning: Could not join {group}:{port}: {e}")

    if not sockets:
        print("Error: Could not join any multicast groups")
        sys.exit(1)

    print("=" * 70)
    print("DOCK FINDER - SOG/COG Field Discovery")
    print("=" * 70)
    print(f"Joined {len(sockets)} multicast groups:")
    for sock, group, port in sockets:
        print(f" - {group}:{port}")
    print()
    print(f"Looking for fields at dock:")
    print(f" SOG: 0.0 - {args.sog_max:.1f} kts (fluctuating near zero)")
    print(f" COG: Jumping with range >= {args.cog_range}° (GPS noise at low speed)")
    print(f"Collecting {args.samples} samples at {args.interval}s intervals...")
    print("-" * 70)

    # Collect samples with diagnostics - track by packet size.
    # field_data aggregates globally; field_data_by_size keeps a separate
    # FieldStats set per packet length (only the per-size data is analyzed
    # later — the global dict is collected but currently unused downstream).
    field_data: Dict[str, FieldStats] = {}
    field_data_by_size: Dict[int, Dict[str, FieldStats]] = defaultdict(dict)
    samples_collected = 0
    last_sample_time_by_size: Dict[int, float] = defaultdict(float)

    # Diagnostic counters
    packets_by_group: Dict[str, int] = defaultdict(int)
    packets_by_size: Dict[int, int] = defaultdict(int)
    empty_parse_count = 0
    total_packets = 0

    try:
        while running and samples_collected < args.samples:
            for sock, group, port in sockets:
                try:
                    data, addr = sock.recvfrom(65535)
                    pkt_size = len(data)
                    total_packets += 1
                    packets_by_group[f"{group}:{port}"] += 1
                    packets_by_size[pkt_size] += 1

                    now = time.time()
                    # Rate limit per packet size, not globally — so a chatty
                    # packet type cannot starve the others of samples.
                    if (now - last_sample_time_by_size[pkt_size]) < args.interval:
                        continue

                    results = scan_packet(data)
                    if not results:
                        empty_parse_count += 1
                        continue

                    samples_collected += 1
                    last_sample_time_by_size[pkt_size] = now

                    # Update statistics (global and per-size)
                    for path, (wire_type, value) in results.items():
                        # Global stats
                        if path not in field_data:
                            field_data[path] = FieldStats(path, wire_type, [])
                        field_data[path].values.append(value)

                        # Per-size stats
                        size_fields = field_data_by_size[pkt_size]
                        if path not in size_fields:
                            size_fields[path] = FieldStats(path, wire_type, [])
                        size_fields[path].values.append(value)

                    # Progress indicator (\r rewrites the same console line)
                    pct = min(100, (samples_collected / args.samples) * 100)
                    print(f"\r Collecting: {samples_collected}/{args.samples} ({pct:.0f}%) [pkts: {total_packets}]", end='', flush=True)

                except BlockingIOError:
                    # Non-blocking socket had nothing to read; try the next one.
                    continue

            time.sleep(0.01)

    finally:
        # Always release the sockets, even on Ctrl-C or an unexpected error.
        for sock, _, _ in sockets:
            sock.close()

    print()  # Newline after progress

    # Show packet diagnostics
    print()
    print("*** PACKET DIAGNOSTICS ***")
    print("-" * 70)
    print(f" Total packets received: {total_packets}")
    print(f" Packets with no parseable fields: {empty_parse_count}")
    print()
    print(" Packets by multicast group:")
    for grp, cnt in sorted(packets_by_group.items(), key=lambda x: -x[1]):
        print(f" {grp}: {cnt}")
    print()
    print(" Packets by size (top 10):")
    for size, cnt in sorted(packets_by_size.items(), key=lambda x: -x[1])[:10]:
        print(f" {size} bytes: {cnt}")

    # Show fields by packet size
    print()
    print("*** FIELDS BY PACKET SIZE ***")
    print("-" * 70)
    for pkt_size in sorted(field_data_by_size.keys(), reverse=True):
        size_fields = field_data_by_size[pkt_size]
        if not size_fields:
            continue
        field_paths = sorted(size_fields.keys())
        sample_count = max(s.count for s in size_fields.values()) if size_fields else 0
        print(f"\n {pkt_size} bytes ({sample_count} samples, {len(field_paths)} fields):")
        for path in field_paths[:20]:  # Show first 20 fields
            stats = size_fields[path]
            # Show with interpretation: a raw value range 0..7 could be an
            # angle in radians; 0..50 could be a speed in m/s.
            interp = ""
            if 0 <= stats.min_val and stats.max_val <= 7:
                min_deg = (stats.min_val * RAD_TO_DEG) % 360
                max_deg = (stats.max_val * RAD_TO_DEG) % 360
                interp = f" | {min_deg:.1f}°-{max_deg:.1f}°"
            elif 0 <= stats.min_val and stats.max_val <= 50:
                min_kts = stats.min_val * MS_TO_KTS
                max_kts = stats.max_val * MS_TO_KTS
                interp = f" | {min_kts:.2f}-{max_kts:.2f} kts"
            print(f" {path:<15} {stats.min_val:>10.4f} - {stats.max_val:>10.4f} (range: {stats.range_val:.4f}){interp}")
        if len(field_paths) > 20:
            print(f" ... and {len(field_paths) - 20} more fields")

    if samples_collected < 5:
        print("\nError: Not enough samples collected. Check your network connection.")
        sys.exit(1)

    # Analyze results - use per-packet-size data for better detection
    print()
    print("=" * 70)
    print(f"ANALYSIS RESULTS ({samples_collected} samples)")
    print("=" * 70)

    sog_candidates = []
    cog_candidates = []

    # Analyze each packet size separately
    for pkt_size, size_fields in field_data_by_size.items():
        for path, stats in size_fields.items():
            if stats.count < 3:  # Need at least 3 samples
                continue

            if stats.is_sog_candidate():
                # Add packet size info to path for clarity.
                # NOTE(review): this mutates the shared FieldStats object
                # stored in field_data_by_size — harmless here because the
                # per-size dict is only printed afterwards, but worth noting.
                stats.path = f"{path} ({pkt_size}B)"
                sog_candidates.append(stats)
            if stats.is_cog_candidate():
                stats.path = f"{path} ({pkt_size}B)"
                cog_candidates.append(stats)

    # Print SOG candidates
    print("\n*** POTENTIAL SOG FIELDS (speed near zero with fluctuation) ***")
    print("-" * 70)
    if sog_candidates:
        for stats in sorted(sog_candidates, key=lambda s: s.std_dev, reverse=True):
            min_kts, max_kts, mean_kts = stats.as_knots()
            print(f" {stats.path}")
            print(f" Raw (m/s): {stats.min_val:.4f} - {stats.max_val:.4f} "
                  f"(mean: {stats.mean:.4f}, std: {stats.std_dev:.4f})")
            print(f" As knots: {min_kts:.3f} - {max_kts:.3f} "
                  f"(mean: {mean_kts:.3f})")
            print()
    else:
        print(" (No candidates found)")
        print(" Try increasing --sog-max or collecting more samples")

    # Print COG candidates
    print("\n*** POTENTIAL COG FIELDS (angle jumping widely) ***")
    print("-" * 70)
    if cog_candidates:
        for stats in sorted(cog_candidates, key=lambda s: s.range_val, reverse=True):
            min_deg = (stats.min_val * RAD_TO_DEG) % 360
            max_deg = (stats.max_val * RAD_TO_DEG) % 360
            range_deg = stats.range_val * RAD_TO_DEG
            print(f" {stats.path}")
            print(f" Raw (rad): {stats.min_val:.4f} - {stats.max_val:.4f} "
                  f"(range: {stats.range_val:.2f} rad, std: {stats.std_dev:.2f})")
            print(f" As degrees: {min_deg:.1f}° - {max_deg:.1f}° "
                  f"(range: {range_deg:.1f}°)")
            print()
    else:
        print(" (No candidates found)")
        print(" Try decreasing --cog-range or collecting more samples")

    # Look for paired candidates in the same parent group: a real GNSS
    # message usually carries SOG and COG as siblings.
    print("\n*** PAIRED SOG/COG CANDIDATES (same field group) ***")
    print("-" * 70)

    sog_groups = {find_parent_group(s.path): s for s in sog_candidates}
    cog_groups = {find_parent_group(s.path): s for s in cog_candidates}

    common_groups = set(sog_groups.keys()) & set(cog_groups.keys())

    if common_groups:
        for group in sorted(common_groups):
            sog = sog_groups[group]
            cog = cog_groups[group]
            min_kts, max_kts, _ = sog.as_knots()
            range_deg = cog.range_val * RAD_TO_DEG

            print(f" Field Group {group}:")
            print(f" SOG: {sog.path}")
            print(f" {min_kts:.3f} - {max_kts:.3f} kts")
            print(f" COG: {cog.path}")
            print(f" Range: {range_deg:.1f}° (std: {cog.std_dev:.2f} rad)")
            print()
    else:
        print(" No paired SOG/COG fields found in the same group")
        print(" SOG and COG may be in different field groups")

    # Show sample values for top candidates
    if sog_candidates or cog_candidates:
        print("\n*** LAST 5 SAMPLE VALUES ***")
        print("-" * 70)

        if sog_candidates:
            top_sog = sorted(sog_candidates, key=lambda s: s.std_dev, reverse=True)[0]
            print(f" Top SOG candidate ({top_sog.path}):")
            last_vals = top_sog.values[-5:]
            kts_vals = [v * MS_TO_KTS for v in last_vals]
            print(f" m/s: {[f'{v:.4f}' for v in last_vals]}")
            print(f" kts: {[f'{v:.3f}' for v in kts_vals]}")

        if cog_candidates:
            top_cog = sorted(cog_candidates, key=lambda s: s.range_val, reverse=True)[0]
            print(f" Top COG candidate ({top_cog.path}):")
            last_vals = top_cog.values[-5:]
            deg_vals = [(v * RAD_TO_DEG) % 360 for v in last_vals]
            print(f" rad: {[f'{v:.4f}' for v in last_vals]}")
            print(f" deg: {[f'{v:.1f}' for v in deg_vals]}")

    # Show diagnostic info if no candidates found, or if verbose
    no_candidates = not sog_candidates and not cog_candidates
    if args.verbose or no_candidates:
        print("\n*** ALL NUMERIC FIELDS (diagnostic - per packet size) ***")
        print("-" * 70)
        print(f" {'Path':<25} {'Type':<5} {'Min':>12} {'Max':>12} {'Range':>12} {'StdDev':>10}")
        print("-" * 70)

        # Collect all valid fields from per-packet-size data
        all_fields = []
        for pkt_size, size_fields in sorted(field_data_by_size.items(), reverse=True):
            for path, stats in size_fields.items():
                if stats.count < 3:
                    continue
                # Create a copy with packet size in path (shallow copy is
                # enough: we only rebind .path, never mutate .values).
                stats_copy = copy(stats)
                stats_copy.path = f"{path} ({pkt_size}B)"
                all_fields.append(stats_copy)

        # Sort by range (most variable first)
        for stats in sorted(all_fields, key=lambda s: s.range_val, reverse=True):
            print(f" {stats.path:<25} {stats.wire_type:<5} "
                  f"{stats.min_val:>12.4f} {stats.max_val:>12.4f} "
                  f"{stats.range_val:>12.4f} {stats.std_dev:>10.4f}")

        # Show interpretation hints for top variable fields
        print("\n*** INTERPRETATION HINTS (top 10 most variable fields) ***")
        print("-" * 70)
        top_variable = sorted(all_fields, key=lambda s: s.range_val, reverse=True)[:10]

        for stats in top_variable:
            print(f"\n {stats.path} ({stats.wire_type}):")
            print(f" Raw: {stats.min_val:.6f} to {stats.max_val:.6f}")

            # Try angle interpretation (radians)
            if 0 <= stats.min_val and stats.max_val <= 7:
                min_deg = (stats.min_val * RAD_TO_DEG) % 360
                max_deg = (stats.max_val * RAD_TO_DEG) % 360
                range_deg = stats.range_val * RAD_TO_DEG
                print(f" As angle (rad->deg): {min_deg:.1f}° to {max_deg:.1f}° (range: {range_deg:.1f}°)")

            # Try speed interpretation (m/s)
            if 0 <= stats.min_val and stats.max_val <= 100:
                min_kts = stats.min_val * MS_TO_KTS
                max_kts = stats.max_val * MS_TO_KTS
                print(f" As speed (m/s->kts): {min_kts:.3f} to {max_kts:.3f} kts")

            # Try temperature interpretation (Kelvin)
            if 250 <= stats.min_val <= 350:
                min_c = stats.min_val - 273.15
                max_c = stats.max_val - 273.15
                print(f" As temp (K->°C): {min_c:.1f}°C to {max_c:.1f}°C")

            # GPS coordinate check
            if -180 <= stats.min_val <= 180 and stats.range_val < 1:
                print(f" Could be GPS coordinate (low variance)")

    if no_candidates:
        print("\n" + "=" * 70)
        print("SUGGESTIONS:")
        print("=" * 70)
        print(" No automatic matches found. Look at the fields above for:")
        print(" - SOG: Small values (< 0.5 m/s) with some variance")
        print(" - COG: Values in 0-6.28 range (radians) with HIGH variance")
        print()
        print(" Common issues:")
        print(" - GPS may not have lock (check for lat/lon)")
        print(" - Values may be in different units than expected")
        print(" - Try: --sog-max 1.0 --cog-range 45")
|
|
|
|
|
|
# Script entry point: only run the scanner when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|