Organizes 11 projects for Cerbo GX/Venus OS into a single repository: - axiom-nmea: Raymarine LightHouse protocol decoder - dbus-generator-ramp: Generator current ramp controller - dbus-lightning: Blitzortung lightning monitor - dbus-meteoblue-forecast: Meteoblue weather forecast - dbus-no-foreign-land: noforeignland.com tracking - dbus-tides: Tide prediction from depth + harmonics - dbus-vrm-history: VRM cloud history proxy - dbus-windy-station: Windy.com weather upload - mfd-custom-app: MFD app deployment package - venus-html5-app: Custom Victron HTML5 app fork - watermaker: Watermaker PLC control UI Adds root README, .gitignore, project template, and per-project .gitignore files. Sensitive config files excluded via .gitignore with .example templates provided. Made-with: Cursor
413 lines
13 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Field 5 Study - Comprehensive analysis of Field 5 subfields.
|
|
|
|
Field 5 appears to contain SOG/COG data based on dock testing.
|
|
This script collects extensive samples to document all subfields.
|
|
|
|
Usage:
|
|
python field5_study.py -i 198.18.5.5
|
|
python field5_study.py -i 198.18.5.5 --samples 100 --interval 0.2
|
|
"""
|
|
|
|
import argparse
|
|
import math
|
|
import os
|
|
import signal
|
|
import socket
|
|
import struct
|
|
import sys
|
|
import time
|
|
from collections import defaultdict
|
|
from dataclasses import dataclass, field
|
|
from datetime import datetime
|
|
from typing import Any, Dict, List, Optional, Tuple
|
|
|
|
# Add parent directory to path for library import
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
|
|
from raymarine_nmea.protocol.parser import ProtobufParser, ProtoField
|
|
from raymarine_nmea.protocol.constants import (
|
|
WIRE_VARINT, WIRE_FIXED64, WIRE_LENGTH, WIRE_FIXED32,
|
|
HEADER_SIZE, RAD_TO_DEG, MS_TO_KTS,
|
|
)
|
|
from raymarine_nmea.sensors import MULTICAST_GROUPS
|
|
|
|
running = True
|
|
|
|
|
|
def signal_handler(signum, frame):
|
|
global running
|
|
running = False
|
|
|
|
|
|
@dataclass
|
|
class FieldStats:
|
|
"""Statistics for a field across multiple samples."""
|
|
path: str
|
|
wire_type: str
|
|
values: List[float] = field(default_factory=list)
|
|
|
|
@property
|
|
def count(self) -> int:
|
|
return len(self.values)
|
|
|
|
@property
|
|
def min_val(self) -> float:
|
|
return min(self.values) if self.values else 0
|
|
|
|
@property
|
|
def max_val(self) -> float:
|
|
return max(self.values) if self.values else 0
|
|
|
|
@property
|
|
def range_val(self) -> float:
|
|
return self.max_val - self.min_val
|
|
|
|
@property
|
|
def mean(self) -> float:
|
|
return sum(self.values) / len(self.values) if self.values else 0
|
|
|
|
@property
|
|
def std_dev(self) -> float:
|
|
if len(self.values) < 2:
|
|
return 0
|
|
mean = self.mean
|
|
variance = sum((v - mean) ** 2 for v in self.values) / len(self.values)
|
|
return math.sqrt(variance)
|
|
|
|
|
|
def decode_float(raw: bytes) -> Optional[float]:
|
|
"""Decode 4 bytes as little-endian float."""
|
|
if len(raw) == 4:
|
|
try:
|
|
val = struct.unpack('<f', raw)[0]
|
|
if val == val: # NaN check
|
|
return val
|
|
except struct.error:
|
|
pass
|
|
return None
|
|
|
|
|
|
def decode_double(raw: bytes) -> Optional[float]:
|
|
"""Decode 8 bytes as little-endian double."""
|
|
if len(raw) == 8:
|
|
try:
|
|
val = struct.unpack('<d', raw)[0]
|
|
if val == val: # NaN check
|
|
return val
|
|
except struct.error:
|
|
pass
|
|
return None
|
|
|
|
|
|
def extract_field5(packet: bytes) -> Dict[str, Tuple[str, float]]:
|
|
"""Extract all Field 5 subfields from a packet."""
|
|
results = {}
|
|
if len(packet) < HEADER_SIZE + 10:
|
|
return results
|
|
|
|
proto_data = packet[HEADER_SIZE:]
|
|
parser = ProtobufParser(proto_data)
|
|
fields = parser.parse_message()
|
|
|
|
# Look for Field 5
|
|
if 5 not in fields:
|
|
return results
|
|
|
|
field5 = fields[5]
|
|
|
|
# If Field 5 is a nested message, extract children
|
|
if field5.children:
|
|
for child_num, child in field5.children.items():
|
|
path = f"5.{child_num}"
|
|
|
|
if child.wire_type == WIRE_FIXED32:
|
|
val = decode_float(child.value)
|
|
if val is not None:
|
|
results[path] = ('f32', val)
|
|
|
|
elif child.wire_type == WIRE_FIXED64:
|
|
val = decode_double(child.value)
|
|
if val is not None:
|
|
results[path] = ('f64', val)
|
|
|
|
elif child.wire_type == WIRE_VARINT:
|
|
results[path] = ('var', float(child.value))
|
|
|
|
# Check for deeper nesting
|
|
if child.children:
|
|
for subchild_num, subchild in child.children.items():
|
|
subpath = f"5.{child_num}.{subchild_num}"
|
|
if subchild.wire_type == WIRE_FIXED32:
|
|
val = decode_float(subchild.value)
|
|
if val is not None:
|
|
results[subpath] = ('f32', val)
|
|
elif subchild.wire_type == WIRE_FIXED64:
|
|
val = decode_double(subchild.value)
|
|
if val is not None:
|
|
results[subpath] = ('f64', val)
|
|
|
|
# Field 5 itself might be a scalar
|
|
elif field5.wire_type == WIRE_FIXED32:
|
|
val = decode_float(field5.value)
|
|
if val is not None:
|
|
results['5'] = ('f32', val)
|
|
|
|
elif field5.wire_type == WIRE_FIXED64:
|
|
val = decode_double(field5.value)
|
|
if val is not None:
|
|
results['5'] = ('f64', val)
|
|
|
|
return results
|
|
|
|
|
|
def interpret_value(val: float, wire_type: str) -> Dict[str, str]:
|
|
"""Generate possible interpretations of a value."""
|
|
interps = {}
|
|
|
|
# Angle (radians to degrees)
|
|
if 0 <= val <= 2 * math.pi + 0.5:
|
|
deg = (val * RAD_TO_DEG) % 360
|
|
interps['angle'] = f"{deg:.1f}°"
|
|
|
|
# Speed (m/s to knots)
|
|
if 0 <= val <= 100:
|
|
kts = val * MS_TO_KTS
|
|
interps['speed'] = f"{kts:.2f} kts"
|
|
|
|
# Small angle (degrees already)
|
|
if 0 <= val <= 360:
|
|
interps['deg_direct'] = f"{val:.1f}° (if already degrees)"
|
|
|
|
# Temperature (Kelvin)
|
|
if 250 <= val <= 350:
|
|
c = val - 273.15
|
|
interps['temp'] = f"{c:.1f}°C"
|
|
|
|
return interps
|
|
|
|
|
|
def main():
|
|
global running
|
|
|
|
parser = argparse.ArgumentParser(
|
|
description="Study Field 5 subfields comprehensively",
|
|
formatter_class=argparse.RawDescriptionHelpFormatter,
|
|
)
|
|
parser.add_argument('-i', '--interface', required=True,
|
|
help='Interface IP for Raymarine multicast')
|
|
parser.add_argument('-n', '--samples', type=int, default=50,
|
|
help='Number of samples to collect (default: 50)')
|
|
parser.add_argument('--interval', type=float, default=0.3,
|
|
help='Seconds between samples (default: 0.3)')
|
|
|
|
args = parser.parse_args()
|
|
|
|
signal.signal(signal.SIGINT, signal_handler)
|
|
signal.signal(signal.SIGTERM, signal_handler)
|
|
|
|
# Create sockets
|
|
sockets = []
|
|
for group, port in MULTICAST_GROUPS:
|
|
try:
|
|
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
|
|
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
|
if hasattr(socket, 'SO_REUSEPORT'):
|
|
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
|
sock.bind(('', port))
|
|
mreq = struct.pack("4s4s", socket.inet_aton(group), socket.inet_aton(args.interface))
|
|
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
|
|
sock.setblocking(False)
|
|
sockets.append((sock, group, port))
|
|
except Exception as e:
|
|
print(f"Warning: Could not join {group}:{port}: {e}")
|
|
|
|
if not sockets:
|
|
print("Error: Could not join any multicast groups")
|
|
sys.exit(1)
|
|
|
|
print("=" * 80)
|
|
print("FIELD 5 COMPREHENSIVE STUDY")
|
|
print("=" * 80)
|
|
print(f"Collecting {args.samples} samples at {args.interval}s intervals...")
|
|
print("-" * 80)
|
|
|
|
# Track by packet size
|
|
field5_by_size: Dict[int, Dict[str, FieldStats]] = defaultdict(dict)
|
|
packets_with_field5 = 0
|
|
packets_without_field5 = 0
|
|
total_packets = 0
|
|
last_sample_time_by_size: Dict[int, float] = defaultdict(float)
|
|
|
|
try:
|
|
samples_collected = 0
|
|
while running and samples_collected < args.samples:
|
|
for sock, group, port in sockets:
|
|
try:
|
|
data, addr = sock.recvfrom(65535)
|
|
pkt_size = len(data)
|
|
total_packets += 1
|
|
|
|
now = time.time()
|
|
if (now - last_sample_time_by_size[pkt_size]) < args.interval:
|
|
continue
|
|
|
|
results = extract_field5(data)
|
|
|
|
if results:
|
|
packets_with_field5 += 1
|
|
samples_collected += 1
|
|
last_sample_time_by_size[pkt_size] = now
|
|
|
|
for path, (wire_type, value) in results.items():
|
|
size_fields = field5_by_size[pkt_size]
|
|
if path not in size_fields:
|
|
size_fields[path] = FieldStats(path, wire_type, [])
|
|
size_fields[path].values.append(value)
|
|
|
|
pct = (samples_collected / args.samples) * 100
|
|
print(f"\r Collecting: {samples_collected}/{args.samples} ({pct:.0f}%)", end='', flush=True)
|
|
else:
|
|
packets_without_field5 += 1
|
|
|
|
except BlockingIOError:
|
|
continue
|
|
|
|
time.sleep(0.01)
|
|
|
|
finally:
|
|
for sock, _, _ in sockets:
|
|
sock.close()
|
|
|
|
print()
|
|
print()
|
|
|
|
# Summary
|
|
print("=" * 80)
|
|
print("FIELD 5 STUDY RESULTS")
|
|
print("=" * 80)
|
|
print(f" Total packets scanned: {total_packets}")
|
|
print(f" Packets with Field 5: {packets_with_field5}")
|
|
print(f" Packets without Field 5: {packets_without_field5}")
|
|
print()
|
|
|
|
if not field5_by_size:
|
|
print(" No Field 5 data found!")
|
|
sys.exit(1)
|
|
|
|
# Show Field 5 structure by packet size
|
|
print("=" * 80)
|
|
print("FIELD 5 SUBFIELDS BY PACKET SIZE")
|
|
print("=" * 80)
|
|
|
|
all_subfields = set()
|
|
for pkt_size in sorted(field5_by_size.keys()):
|
|
size_fields = field5_by_size[pkt_size]
|
|
all_subfields.update(size_fields.keys())
|
|
|
|
# For each packet size that has Field 5
|
|
for pkt_size in sorted(field5_by_size.keys()):
|
|
size_fields = field5_by_size[pkt_size]
|
|
if not size_fields:
|
|
continue
|
|
|
|
sample_count = max(s.count for s in size_fields.values())
|
|
print(f"\n--- {pkt_size} bytes ({sample_count} samples) ---")
|
|
print()
|
|
|
|
for path in sorted(size_fields.keys(), key=lambda x: [int(p) for p in x.split('.')]):
|
|
stats = size_fields[path]
|
|
|
|
print(f" {path} ({stats.wire_type}):")
|
|
print(f" Samples: {stats.count}")
|
|
print(f" Range: {stats.min_val:.6f} to {stats.max_val:.6f}")
|
|
print(f" Mean: {stats.mean:.6f}")
|
|
print(f" StdDev: {stats.std_dev:.6f}")
|
|
|
|
# Show interpretations
|
|
interps = interpret_value(stats.mean, stats.wire_type)
|
|
if interps:
|
|
print(f" Interpretations:")
|
|
for itype, ival in interps.items():
|
|
print(f" - As {itype}: {ival}")
|
|
|
|
# Behavioral analysis
|
|
if stats.std_dev < 0.001 and stats.count >= 3:
|
|
print(f" Behavior: CONSTANT")
|
|
elif stats.range_val > 3.0 and 0 <= stats.min_val <= 7:
|
|
range_deg = stats.range_val * RAD_TO_DEG
|
|
print(f" Behavior: HIGHLY VARIABLE ({range_deg:.0f}° range) - likely COG or heading")
|
|
elif stats.range_val > 0.01 and stats.max_val < 1.0:
|
|
range_kts = stats.range_val * MS_TO_KTS
|
|
print(f" Behavior: SMALL FLUCTUATION ({range_kts:.3f} kts range) - could be SOG")
|
|
|
|
print()
|
|
|
|
# Summary table
|
|
print("=" * 80)
|
|
print("FIELD 5 SUMMARY TABLE")
|
|
print("=" * 80)
|
|
print()
|
|
print(f" {'Subfield':<10} {'Type':<5} {'Min':>12} {'Max':>12} {'StdDev':>10} {'Behavior':<20} {'Likely Purpose'}")
|
|
print("-" * 95)
|
|
|
|
# Aggregate across all packet sizes for the summary
|
|
aggregated: Dict[str, FieldStats] = {}
|
|
for pkt_size, size_fields in field5_by_size.items():
|
|
for path, stats in size_fields.items():
|
|
if path not in aggregated:
|
|
aggregated[path] = FieldStats(path, stats.wire_type, [])
|
|
aggregated[path].values.extend(stats.values)
|
|
|
|
for path in sorted(aggregated.keys(), key=lambda x: [int(p) for p in x.split('.')]):
|
|
stats = aggregated[path]
|
|
|
|
# Determine behavior
|
|
if stats.std_dev < 0.001:
|
|
behavior = "Constant"
|
|
elif stats.range_val > 3.0:
|
|
behavior = f"Variable ({stats.range_val * RAD_TO_DEG:.0f}° range)"
|
|
elif stats.range_val > 0.01:
|
|
behavior = f"Fluctuating"
|
|
else:
|
|
behavior = "Near-constant"
|
|
|
|
# Guess purpose based on behavior and value range
|
|
purpose = "Unknown"
|
|
if stats.range_val > 3.0 and 0 <= stats.min_val <= 7:
|
|
purpose = "COG or heading"
|
|
elif stats.std_dev < 0.001 and 0.005 <= stats.mean <= 0.5:
|
|
purpose = "SOG (at dock)"
|
|
elif stats.std_dev < 0.001 and stats.mean > 10:
|
|
purpose = "Fixed parameter"
|
|
elif 0 <= stats.mean <= 0.2 and stats.max_val < 1:
|
|
purpose = "SOG candidate"
|
|
|
|
print(f" {path:<10} {stats.wire_type:<5} {stats.min_val:>12.4f} {stats.max_val:>12.4f} "
|
|
f"{stats.std_dev:>10.4f} {behavior:<20} {purpose}")
|
|
|
|
print()
|
|
print("=" * 80)
|
|
print("INTERPRETATION GUIDE")
|
|
print("=" * 80)
|
|
print("""
|
|
Based on dock behavior (SOG ~0, COG jumping wildly):
|
|
|
|
- COG (Course Over Ground): Look for fields with HIGH variance spanning
|
|
most of 0-2π radians (~0-360°). At dock, GPS-derived COG is unreliable
|
|
and jumps randomly.
|
|
|
|
- SOG (Speed Over Ground): Look for fields with small values (~0.01-0.1 m/s)
|
|
that are relatively constant at dock. May show slight fluctuation.
|
|
|
|
- Heading: May be similar to COG but derived from compass, so more stable.
|
|
|
|
- Fixed parameters: Constants like 0.05, 0.1, 11.93 may be configuration
|
|
values, damping factors, or display settings.
|
|
""")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|