Organizes 11 projects for Cerbo GX/Venus OS into a single repository: - axiom-nmea: Raymarine LightHouse protocol decoder - dbus-generator-ramp: Generator current ramp controller - dbus-lightning: Blitzortung lightning monitor - dbus-meteoblue-forecast: Meteoblue weather forecast - dbus-no-foreign-land: noforeignland.com tracking - dbus-tides: Tide prediction from depth + harmonics - dbus-vrm-history: VRM cloud history proxy - dbus-windy-station: Windy.com weather upload - mfd-custom-app: MFD app deployment package - venus-html5-app: Custom Victron HTML5 app fork - watermaker: Watermaker PLC control UI Adds root README, .gitignore, project template, and per-project .gitignore files. Sensitive config files excluded via .gitignore with .example templates provided. Made-with: Cursor
341 lines
11 KiB
Python
Executable File
341 lines
11 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
COG/SOG Field Finder
|
|
|
|
Searches all protobuf fields for values that match expected COG and SOG ranges.
|
|
Helps identify which fields contain navigation data.
|
|
|
|
Usage:
|
|
python find_cog_sog.py -i 198.18.5.5 --cog-min 0 --cog-max 359 --sog-min 0 --sog-max 0.5
|
|
python find_cog_sog.py -i 198.18.5.5 --show-all # Show ALL numeric fields
|
|
"""
|
|
|
|
import argparse
|
|
import os
|
|
import signal
|
|
import socket
|
|
import struct
|
|
import sys
|
|
import time
|
|
from datetime import datetime
|
|
from typing import Dict, Any, Optional, List, Tuple
|
|
|
|
# Add parent directory to path for library import
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
|
|
from raymarine_nmea.protocol.parser import ProtobufParser, ProtoField
|
|
from raymarine_nmea.protocol.constants import (
|
|
WIRE_VARINT, WIRE_FIXED64, WIRE_LENGTH, WIRE_FIXED32,
|
|
HEADER_SIZE, RAD_TO_DEG, MS_TO_KTS,
|
|
)
|
|
from raymarine_nmea.sensors import MULTICAST_GROUPS
|
|
|
|
# Global run flag; cleared by signal_handler() to stop the capture loop in main().
running = True
|
|
|
|
|
|
def signal_handler(signum, frame):
    """Handle SIGINT/SIGTERM: clear the global run flag so main() exits cleanly."""
    global running
    running = False
|
|
|
|
|
|
def decode_float(raw: bytes) -> Optional[float]:
    """Decode *raw* as a little-endian 32-bit float.

    Returns the value, or None when *raw* is not exactly 4 bytes,
    fails to unpack, or decodes to NaN.
    """
    if len(raw) != 4:
        return None
    try:
        (value,) = struct.unpack('<f', raw)
    except struct.error:
        return None
    # NaN is the only value unequal to itself; treat it as "no data".
    return value if value == value else None
|
|
|
|
|
|
def decode_double(raw: bytes) -> Optional[float]:
    """Decode *raw* as a little-endian 64-bit double.

    Returns the value, or None when *raw* is not exactly 8 bytes,
    fails to unpack, or decodes to NaN.
    """
    if len(raw) != 8:
        return None
    try:
        (value,) = struct.unpack('<d', raw)
    except struct.error:
        return None
    # NaN is the only value unequal to itself; treat it as "no data".
    return value if value == value else None
|
|
|
|
|
|
def get_interpretations(val: float) -> Dict[str, float]:
    """Return the plausible navigation readings of a raw numeric value.

    Keys: 'deg' when *val* could be an angle in radians (converted to
    degrees), 'kts' when it could be a speed in m/s (converted to knots).
    """
    readings: Dict[str, float] = {}

    # Within [0, 6.5] the value may be an angle in radians (2*pi ~ 6.28).
    if 0 <= val <= 6.5:
        readings['deg'] = (val * RAD_TO_DEG) % 360

    # Within [0, 100] the value may be a speed in m/s.
    if 0 <= val <= 100:
        readings['kts'] = val * MS_TO_KTS

    return readings
|
|
|
|
|
|
def check_cog_match(val: float, cog_min: float, cog_max: float) -> Optional[float]:
    """Test whether *val* (radians) falls in the expected COG window.

    Returns the value converted to degrees when it matches, else None.
    The window may wrap through north (e.g. cog_min=350, cog_max=10).
    """
    if not (0 <= val <= 6.5):  # outside the plausible radian range
        return None
    deg = (val * RAD_TO_DEG) % 360
    if cog_min <= cog_max:
        in_window = cog_min <= deg <= cog_max
    else:
        # Wrap-around window, e.g. 350 deg through 10 deg.
        in_window = deg >= cog_min or deg <= cog_max
    return deg if in_window else None
|
|
|
|
|
|
def check_sog_match(val: float, sog_min: float, sog_max: float) -> Optional[float]:
    """Test whether *val* (m/s) falls in the expected SOG window.

    Returns the value converted to knots when it matches, else None.
    """
    if not (0 <= val <= 50):  # outside a reasonable m/s range
        return None
    knots = val * MS_TO_KTS
    return knots if sog_min <= knots <= sog_max else None
|
|
|
|
|
|
def scan_all_fields(pf: ProtoField, path: str, results: List[Dict]):
    """Recursively walk a parsed protobuf field tree.

    Appends one record per decodable numeric leaf to *results*, with its
    dotted field path, wire-type tag, raw value, and interpretations.
    """
    wire = pf.wire_type

    if wire == WIRE_FIXED32:
        num = decode_float(pf.value)
        if num is not None:
            results.append({
                'path': path,
                'wire': 'f32',
                'raw': num,
                'interps': get_interpretations(num),
            })
    elif wire == WIRE_FIXED64:
        num = decode_double(pf.value)
        if num is not None:
            results.append({
                'path': path,
                'wire': 'f64',
                'raw': num,
                'interps': get_interpretations(num),
            })
    elif wire == WIRE_VARINT:
        # Varints are kept as their integer raw value; interpretations
        # are computed from the float conversion.
        results.append({
            'path': path,
            'wire': 'var',
            'raw': pf.value,
            'interps': get_interpretations(float(pf.value)),
        })

    # Descend into nested messages, extending the dotted path.
    if pf.children:
        for child_num, child in pf.children.items():
            scan_all_fields(child, f"{path}.{child_num}", results)
|
|
|
|
|
|
def scan_packet(packet: bytes) -> List[Dict]:
    """Parse one UDP packet and return records for ALL numeric protobuf fields."""
    results: List[Dict] = []
    # Too short to hold the Raymarine header plus a meaningful payload.
    if len(packet) < HEADER_SIZE + 10:
        return results

    parser = ProtobufParser(packet[HEADER_SIZE:])
    # Field numbers 14, 16, 20 are collected as repeated entries.
    fields = parser.parse_message(collect_repeated={14, 16, 20})

    for field_num, value in fields.items():
        if isinstance(value, list):  # repeated field: index each occurrence
            for idx, pf in enumerate(value):
                scan_all_fields(pf, f"{field_num}[{idx}]", results)
        else:
            scan_all_fields(value, f"{field_num}", results)

    return results
|
|
|
|
|
|
def main():
    """Capture Raymarine multicast packets and report which protobuf fields
    consistently match the expected COG (degrees) and SOG (knots) ranges.

    Exits with status 1 if no multicast group could be joined.
    """
    global running

    parser = argparse.ArgumentParser(description="Find COG/SOG fields in Raymarine packets")
    parser.add_argument('-i', '--interface', required=True,
                        help='Interface IP for Raymarine multicast (e.g., 198.18.5.5)')
    parser.add_argument('--cog-min', type=float, default=0,
                        help='Minimum expected COG in degrees (default: 0)')
    parser.add_argument('--cog-max', type=float, default=359,
                        help='Maximum expected COG in degrees (default: 359)')
    parser.add_argument('--sog-min', type=float, default=0,
                        help='Minimum expected SOG in knots (default: 0)')
    parser.add_argument('--sog-max', type=float, default=2.0,
                        help='Maximum expected SOG in knots (default: 2.0)')
    parser.add_argument('-n', '--count', type=int, default=5,
                        help='Number of packets to analyze (default: 5)')
    parser.add_argument('--interval', type=float, default=1.0,
                        help='Minimum interval between packets (default: 1.0)')

    args = parser.parse_args()

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # Join every known Raymarine multicast group on the given interface.
    sockets = []
    for group, port in MULTICAST_GROUPS:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            if hasattr(socket, 'SO_REUSEPORT'):
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            sock.bind(('', port))
            # ip_mreq: 4-byte group address followed by 4-byte interface address.
            mreq = struct.pack("4s4s", socket.inet_aton(group), socket.inet_aton(args.interface))
            sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
            sock.setblocking(False)
            sockets.append((sock, group, port))
        except OSError as e:  # socket/address errors only; don't mask real bugs
            print(f"Error joining {group}:{port}: {e}")

    if not sockets:
        print("Error: Could not join any multicast groups")
        sys.exit(1)

    print("COG/SOG Field Finder")
    print("====================")
    print(f"Looking for COG: {args.cog_min}° - {args.cog_max}°")
    print(f"Looking for SOG: {args.sog_min} - {args.sog_max} kts")
    print(f"Analyzing {args.count} packets...")
    print()

    # path -> list of (raw value, interpretations) pairs, one per analyzed packet.
    field_values: Dict[str, List[Tuple[float, Dict]]] = {}

    analyzed = 0
    last_time = 0.0

    try:
        while running and analyzed < args.count:
            for sock, group, port in sockets:
                try:
                    data, _ = sock.recvfrom(65535)

                    # Rate-limit: drop packets arriving faster than --interval.
                    now = time.time()
                    if args.interval > 0 and (now - last_time) < args.interval:
                        continue

                    results = scan_packet(data)
                    if not results:
                        continue

                    analyzed += 1
                    last_time = now

                    for r in results:
                        field_values.setdefault(r['path'], []).append((r['raw'], r['interps']))

                except BlockingIOError:
                    # Non-blocking socket had nothing to read; try the next one.
                    continue

            time.sleep(0.01)  # avoid busy-spinning on the non-blocking sockets

    finally:
        for sock, _, _ in sockets:
            sock.close()

    # ---- Analysis: classify each field path by how its samples interpret ----
    print(f"\n{'='*80}")
    print(f"ANALYSIS - {analyzed} packets")
    print(f"{'='*80}")

    cog_candidates = []
    sog_candidates = []
    other_fields = []

    for path in sorted(field_values.keys()):
        values = field_values[path]
        raw_vals = [v[0] for v in values]

        if not raw_vals:
            continue

        # Count samples whose radians->degrees reading falls in the COG window.
        cog_matches = 0
        cog_degs = []
        for raw, interps in values:
            if 'deg' in interps:
                cog_degs.append(interps['deg'])
            if check_cog_match(raw, args.cog_min, args.cog_max) is not None:
                cog_matches += 1

        # Count samples whose m/s->knots reading falls in the SOG window.
        sog_matches = 0
        sog_kts = []
        for raw, interps in values:
            if 'kts' in interps:
                sog_kts.append(interps['kts'])
            if check_sog_match(raw, args.sog_min, args.sog_max) is not None:
                sog_matches += 1

        # A field is a candidate only when EVERY sample matched the window.
        if cog_matches == len(values) and cog_degs:
            cog_candidates.append((path, cog_degs, raw_vals))
        elif sog_matches == len(values) and sog_kts:
            sog_candidates.append((path, sog_kts, raw_vals))
        else:
            other_fields.append((path, raw_vals, cog_degs, sog_kts))

    # Print COG candidates
    print(f"\n*** POTENTIAL COG FIELDS (all {analyzed} samples matched {args.cog_min}°-{args.cog_max}°) ***")
    if cog_candidates:
        for path, degs, raws in cog_candidates:
            min_deg, max_deg = min(degs), max(degs)
            print(f" {path}: {min_deg:.1f}° - {max_deg:.1f}° (raw: {min(raws):.4f} - {max(raws):.4f} rad)")
    else:
        print(" (none found)")

    # Print SOG candidates
    print(f"\n*** POTENTIAL SOG FIELDS (all {analyzed} samples matched {args.sog_min}-{args.sog_max} kts) ***")
    if sog_candidates:
        for path, kts_list, raws in sog_candidates:
            min_kts, max_kts = min(kts_list), max(kts_list)
            print(f" {path}: {min_kts:.2f} - {max_kts:.2f} kts (raw: {min(raws):.4f} - {max(raws):.4f} m/s)")
    else:
        print(" (none found)")

    # Remaining fields whose magnitudes still look navigation-like
    # (small positive values that may be COG/SOG in other units).
    print(f"\n*** OTHER NUMERIC FIELDS (may be COG/SOG with different interpretation) ***")
    nav_fields = [(p, r, c, s) for p, r, c, s in other_fields
                  if len(r) > 0 and 0 < min(r) < 100 and max(r) < 1000]

    for path, raws, cog_degs, sog_kts in sorted(nav_fields, key=lambda x: x[0]):
        min_raw, max_raw = min(raws), max(raws)
        info = f" {path}: raw {min_raw:.4f} - {max_raw:.4f}"
        if cog_degs:
            info += f" | as deg: {min(cog_degs):.1f}° - {max(cog_degs):.1f}°"
        if sog_kts:
            info += f" | as kts: {min(sog_kts):.2f} - {max(sog_kts):.2f}"
        print(info)
|
|
|
|
|
|
# Script entry point when run directly (not imported).
if __name__ == "__main__":
    main()
|