Organizes 11 projects for Cerbo GX/Venus OS into a single repository: - axiom-nmea: Raymarine LightHouse protocol decoder - dbus-generator-ramp: Generator current ramp controller - dbus-lightning: Blitzortung lightning monitor - dbus-meteoblue-forecast: Meteoblue weather forecast - dbus-no-foreign-land: noforeignland.com tracking - dbus-tides: Tide prediction from depth + harmonics - dbus-vrm-history: VRM cloud history proxy - dbus-windy-station: Windy.com weather upload - mfd-custom-app: MFD app deployment package - venus-html5-app: Custom Victron HTML5 app fork - watermaker: Watermaker PLC control UI Adds root README, .gitignore, project template, and per-project .gitignore files. Sensitive config files excluded via .gitignore with .example templates provided. Made-with: Cursor
807 lines
27 KiB
Python
Executable File
807 lines
27 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Raymarine LightHouse Network Decoder
|
|
|
|
Decodes sensor data from Raymarine AXIOM MFDs broadcast over UDP multicast.
|
|
The protocol uses Protocol Buffers binary encoding (not standard NMEA 0183).
|
|
|
|
Usage:
|
|
python raymarine_decoder.py -i 198.18.5.5
|
|
python raymarine_decoder.py -i 198.18.5.5 --json
|
|
python raymarine_decoder.py --pcap raymarine_sample.pcap
|
|
|
|
Multicast Groups:
|
|
226.192.206.98:2561 - Navigation sensors
|
|
226.192.206.99:2562 - Heartbeat/status
|
|
226.192.206.102:2565 - Mixed sensor data
|
|
226.192.219.0:3221 - Display sync
|
|
|
|
Author: Reverse-engineered from Raymarine network captures
|
|
"""
|
|
|
|
import argparse
|
|
import json
|
|
import os
|
|
import socket
|
|
import struct
|
|
import sys
|
|
import threading
|
|
import time
|
|
from collections import deque
|
|
from datetime import datetime
|
|
from typing import Dict, List, Optional, Tuple, Any
|
|
|
|
|
|
# Raymarine multicast configuration: (group address, UDP port) pairs the
# MFD broadcasts on, discovered from network captures.
MULTICAST_GROUPS = [
    ("226.192.206.98", 2561),  # Navigation sensors
    ("226.192.206.99", 2562),  # Heartbeat/status
    ("226.192.206.102", 2565),  # Mixed sensor data (primary)
    ("226.192.219.0", 3221),  # Display sync
]

# Conversion constants
RADIANS_TO_DEGREES = 57.2957795131  # degrees per radian (180/pi)
MS_TO_KNOTS = 1.94384449  # knots per metre/second
FEET_TO_METERS = 0.3048  # metres per foot
|
|
|
|
|
|
class SensorData:
    """Holds the current state of all decoded sensor values.

    Shared between the multicast listener threads (writers) and the
    dashboard/JSON output loop (reader); all field access should happen
    while holding ``self.lock``.
    """

    def __init__(self):
        # Latest decoded values; None until the first successful decode.
        self.latitude: Optional[float] = None
        self.longitude: Optional[float] = None
        self.heading_deg: Optional[float] = None
        self.wind_speed_kts: Optional[float] = None
        self.wind_direction_deg: Optional[float] = None
        self.depth_ft: Optional[float] = None
        self.water_temp_c: Optional[float] = None
        self.air_temp_c: Optional[float] = None
        self.sog_kts: Optional[float] = None  # Speed over ground
        self.cog_deg: Optional[float] = None  # Course over ground

        # Timestamps for freshness tracking (0 means "never updated")
        self.gps_time: float = 0
        self.heading_time: float = 0
        self.wind_time: float = 0
        self.depth_time: float = 0
        self.temp_time: float = 0

        # Statistics
        self.packet_count: int = 0
        self.gps_count: int = 0
        self.start_time: float = time.time()

        # Thread safety: guards every read/write of the fields above
        self.lock = threading.Lock()

    def to_dict(self) -> Dict[str, Any]:
        """Convert sensor data to a dictionary for JSON output."""
        with self.lock:
            return {
                "timestamp": datetime.now().isoformat(),
                "position": {
                    "latitude": self.latitude,
                    "longitude": self.longitude,
                    # gps_time == 0 is the "no fix yet" sentinel, so the
                    # truthiness test here is intentional.
                    "age_seconds": time.time() - self.gps_time if self.gps_time else None,
                },
                "navigation": {
                    "heading_deg": self.heading_deg,
                    "sog_kts": self.sog_kts,
                    "cog_deg": self.cog_deg,
                },
                "wind": {
                    "speed_kts": self.wind_speed_kts,
                    "direction_deg": self.wind_direction_deg,
                },
                "depth": {
                    "feet": self.depth_ft,
                    # BUG FIX: the previous truthiness test (`if self.depth_ft`)
                    # reported a legitimate 0.0 ft depth as None meters.
                    "meters": self.depth_ft * FEET_TO_METERS if self.depth_ft is not None else None,
                },
                "temperature": {
                    "water_c": self.water_temp_c,
                    "air_c": self.air_temp_c,
                },
                "statistics": {
                    "packets_received": self.packet_count,
                    "gps_packets": self.gps_count,
                    "uptime_seconds": time.time() - self.start_time,
                }
            }
|
|
|
|
|
|
class ProtobufDecoder:
    """
    Helpers for Raymarine's protobuf-like binary wire format.

    Wire types:
    0 = Varint
    1 = 64-bit (fixed64, double)
    2 = Length-delimited (string, bytes, nested message)
    5 = 32-bit (fixed32, float)
    """

    @staticmethod
    def decode_varint(data: bytes, offset: int) -> Tuple[int, int]:
        """Decode a protobuf varint at *offset*; return (value, bytes_consumed)."""
        value = 0
        used = 0
        shift = 0

        for pos in range(offset, len(data)):
            byte = data[pos]
            value |= (byte & 0x7F) << shift
            used += 1
            if byte & 0x80 == 0:
                break  # continuation bit clear: final byte of the varint
            shift += 7
            if shift > 63:
                break  # oversized/malformed varint: bail out

        return value, used

    @staticmethod
    def decode_double(data: bytes, offset: int) -> Optional[float]:
        """Decode a little-endian 64-bit double; None if truncated/invalid."""
        raw = data[offset:offset + 8]
        if len(raw) != 8:
            return None
        try:
            (value,) = struct.unpack('<d', raw)
        except struct.error:
            return None
        return value

    @staticmethod
    def decode_float(data: bytes, offset: int) -> Optional[float]:
        """Decode a little-endian 32-bit float; None if truncated/invalid."""
        raw = data[offset:offset + 4]
        if len(raw) != 4:
            return None
        try:
            (value,) = struct.unpack('<f', raw)
        except struct.error:
            return None
        return value

    @staticmethod
    def is_valid_latitude(val: float) -> bool:
        """True when *val* is a plausible latitude in [-90, 90] degrees."""
        return abs(val) <= 90

    @staticmethod
    def is_valid_longitude(val: float) -> bool:
        """True when *val* is a plausible longitude in [-180, 180] degrees."""
        return abs(val) <= 180

    @staticmethod
    def is_valid_angle_radians(val: float) -> bool:
        """True for angles in [0, 6.5] rad (slightly more than 2*pi for tolerance)."""
        return 0 <= val <= 6.5

    @staticmethod
    def is_valid_speed_ms(val: float) -> bool:
        """True for plausible speeds in [0, 60] m/s."""
        return 0 <= val <= 60
|
|
|
|
|
|
class RaymarineDecoder:
    """
    Main decoder for Raymarine network packets.

    Uses GPS-anchored parsing strategy:
    1. Find GPS using reliable 0x09/0x11 pattern at offset ~0x0032
    2. Extract other values at known offsets relative to GPS or packet start
    """

    def __init__(self, sensor_data: SensorData, verbose: bool = False):
        self.sensor_data = sensor_data
        self.verbose = verbose
        self.pb = ProtobufDecoder()

        # Packet size categories select which extraction passes run
        # in decode_packet().
        self.SMALL_PACKETS = range(0, 200)
        self.MEDIUM_PACKETS = range(200, 600)
        self.LARGE_PACKETS = range(600, 1200)
        self.XLARGE_PACKETS = range(1200, 3000)

    def decode_packet(self, data: bytes, source: Tuple[str, int]) -> bool:
        """
        Decode a single UDP packet.
        Returns True if any useful data was extracted.
        """
        with self.sensor_data.lock:
            self.sensor_data.packet_count += 1

        if len(data) < 50:
            return False  # Too small to contain useful data

        decoded_something = False

        # Try GPS extraction (most reliable)
        if self._extract_gps(data):
            decoded_something = True

        # Try extracting other sensor data based on packet size
        pkt_len = len(data)

        if pkt_len in self.LARGE_PACKETS or pkt_len in self.XLARGE_PACKETS:
            # Large packets typically have full sensor data
            if self._extract_navigation(data):
                decoded_something = True
            if self._extract_wind(data):
                decoded_something = True
            if self._extract_depth(data):
                decoded_something = True
            if self._extract_temperature(data):
                decoded_something = True

        elif pkt_len in self.MEDIUM_PACKETS:
            # Medium packets may have partial data
            if self._extract_wind(data):
                decoded_something = True
            if self._extract_depth(data):
                decoded_something = True

        return decoded_something

    def _extract_gps(self, data: bytes) -> bool:
        """
        Extract GPS coordinates using the 0x09/0x11 pattern.

        Pattern:
        0x09 [8-byte latitude double] 0x11 [8-byte longitude double]

        Returns True if valid GPS was found.
        """
        # Scan for the GPS pattern starting around offset 0x30
        search_start = 0x20
        search_end = min(len(data) - 18, 0x100)

        for offset in range(search_start, search_end):
            if data[offset] != 0x09:
                continue

            # Check if 0x11 follows at expected position
            lon_tag_offset = offset + 9
            if lon_tag_offset >= len(data) or data[lon_tag_offset] != 0x11:
                continue

            # Decode latitude and longitude
            lat = self.pb.decode_double(data, offset + 1)
            lon = self.pb.decode_double(data, lon_tag_offset + 1)

            if lat is None or lon is None:
                continue

            # Validate coordinates
            if not self.pb.is_valid_latitude(lat) or not self.pb.is_valid_longitude(lon):
                continue

            # Additional sanity check: filter out obviously wrong values
            # Most readings should be reasonable coordinates, not near 0,0
            if abs(lat) < 0.1 and abs(lon) < 0.1:
                continue

            with self.sensor_data.lock:
                self.sensor_data.latitude = lat
                self.sensor_data.longitude = lon
                self.sensor_data.gps_time = time.time()
                self.sensor_data.gps_count += 1

            if self.verbose:
                print(f"GPS: {lat:.6f}, {lon:.6f} (offset 0x{offset:04x})")

            return True

        return False

    def _extract_navigation(self, data: bytes) -> bool:
        """
        Extract heading, SOG, COG from packet.
        These are typically 32-bit floats in radians.
        """
        found = False

        # Look for heading at known offsets for large packets
        heading_offsets = [0x006f, 0x00d4, 0x0073, 0x00d8]

        for offset in heading_offsets:
            if offset + 4 > len(data):
                continue

            # Check for float tag (wire type 5)
            if offset > 0 and (data[offset - 1] & 0x07) == 5:
                val = self.pb.decode_float(data, offset)
                # BUG FIX: the previous `if val and ...` truthiness test
                # silently discarded a heading of exactly 0.0 rad (due
                # north), which is_valid_angle_radians accepts.
                if val is not None and self.pb.is_valid_angle_radians(val):
                    heading_deg = val * RADIANS_TO_DEGREES
                    with self.sensor_data.lock:
                        self.sensor_data.heading_deg = heading_deg % 360
                        self.sensor_data.heading_time = time.time()
                    found = True
                    break

        return found

    def _extract_wind(self, data: bytes) -> bool:
        """
        Extract wind speed and direction.
        Wind speed is in m/s, direction in radians.

        Known offsets by packet size (discovered via pcap analysis):
        - 344 bytes: speed @ 0x00a5, dir @ 0x00a0
        - 446 bytes: speed @ 0x00ac, dir @ 0x00a7
        - 788 bytes: speed @ 0x00ca, dir @ 0x00c5
        - 888 bytes: speed @ 0x00ca, dir @ 0x00c5
        - 931 bytes: speed @ 0x00ca, dir @ 0x00c5
        - 1031 bytes: speed @ 0x00ca, dir @ 0x00c5
        - 1472 bytes: speed @ 0x0101, dir @ 0x00fc

        Note: 878-byte packets do NOT contain wind data at these offsets.
        """
        pkt_len = len(data)

        # Define offset pairs (speed_offset, dir_offset) for SPECIFIC packet sizes
        # Only process packet sizes known to contain wind data
        offset_pairs = None

        if pkt_len == 344:
            offset_pairs = [(0x00a5, 0x00a0)]
        elif pkt_len == 446:
            offset_pairs = [(0x00ac, 0x00a7)]
        elif pkt_len in (788, 888, 931, 1031):
            offset_pairs = [(0x00ca, 0x00c5)]
        elif pkt_len == 1472:
            offset_pairs = [(0x0101, 0x00fc)]

        # Skip unknown packet sizes to avoid garbage values
        if offset_pairs is None:
            return False

        for speed_offset, dir_offset in offset_pairs:
            if speed_offset + 4 > pkt_len or dir_offset + 4 > pkt_len:
                continue

            speed_val = self.pb.decode_float(data, speed_offset)
            dir_val = self.pb.decode_float(data, dir_offset)

            if speed_val is None or dir_val is None:
                continue

            # Validate: speed 0.1-50 m/s (~0.2-97 kts), direction 0-2*pi radians
            if not (0.1 < speed_val < 50):
                continue
            if not (0 <= dir_val <= 6.5):
                continue

            # Convert and store
            with self.sensor_data.lock:
                self.sensor_data.wind_speed_kts = speed_val * MS_TO_KNOTS
                self.sensor_data.wind_direction_deg = (dir_val * RADIANS_TO_DEGREES) % 360
                self.sensor_data.wind_time = time.time()
            return True

        return False

    def _extract_depth(self, data: bytes) -> bool:
        """
        Extract depth value (in feet, stored as 64-bit double).
        Depth is tagged with field 5 (0x29) or field 11 (0x59) wire type 1.
        """
        # Search for depth by looking for wire type 1 tags with field 5 or 11
        # Tag format: (field_number << 3) | wire_type
        # Field 5, wire type 1 = (5 << 3) | 1 = 0x29
        # Field 11, wire type 1 = (11 << 3) | 1 = 0x59

        depth_tags = [0x29, 0x59]  # Field 5 and 11, wire type 1

        for offset in range(0x40, min(len(data) - 9, 0x300)):
            tag = data[offset]
            if tag not in depth_tags:
                continue

            val = self.pb.decode_double(data, offset + 1)
            if val is None:
                continue

            # Reasonable depth range: 0.5 to 500 feet
            if 0.5 < val < 500:
                with self.sensor_data.lock:
                    self.sensor_data.depth_ft = val
                    self.sensor_data.depth_time = time.time()
                return True

        # Fallback: scan for any reasonable depth-like double values
        # in larger packets where we have more sensor data
        if len(data) > 800:
            for offset in range(0x80, min(len(data) - 9, 0x200)):
                # Only check positions that look like protobuf fields
                tag = data[offset]
                if (tag & 0x07) != 1:  # Wire type 1 (double)
                    continue

                val = self.pb.decode_double(data, offset + 1)
                if val is None:
                    continue

                # Typical depth range for Florida Keys: 3-50 feet
                if 2 < val < 100:
                    with self.sensor_data.lock:
                        self.sensor_data.depth_ft = val
                        self.sensor_data.depth_time = time.time()
                    return True

        return False

    def _extract_temperature(self, data: bytes) -> bool:
        """
        Extract temperature values (water and air).
        Temperature encoding is not yet fully understood.
        Might be in Kelvin, Celsius, or Fahrenheit.

        Note: Temperature extraction is experimental and may not produce
        reliable results without the proprietary protobuf schema.
        """
        # Search for temperature-like values with strict validation:
        # only accept wire type 1 (double) fields whose value falls in
        # a narrow Kelvin window, to avoid displaying garbage.
        # TODO: replace with exact offsets once the field layout is known.
        for offset in range(0x50, min(len(data) - 9, 0x200)):
            # Must be preceded by a wire type 1 tag
            tag = data[offset]
            if (tag & 0x07) != 1:  # Wire type 1 = 64-bit
                continue

            field_num = tag >> 3
            # Temperature fields are likely in a reasonable field number range
            if field_num < 1 or field_num > 30:
                continue

            val = self.pb.decode_double(data, offset + 1)
            if val is None:
                continue

            # Very strict validation for Kelvin range (water temp 15-35°C)
            if 288 < val < 308:  # 15°C to 35°C in Kelvin
                temp_c = val - 273.15
                with self.sensor_data.lock:
                    if self.sensor_data.water_temp_c is None:
                        self.sensor_data.water_temp_c = temp_c
                        self.sensor_data.temp_time = time.time()
                        return True

        return False
|
|
|
|
|
|
class MulticastListener:
    """
    Listens on multiple multicast groups and feeds packets to the decoder.

    One daemon thread is spawned per group; all threads share the decoder,
    which serializes state updates through SensorData's lock.
    """

    def __init__(self, decoder: RaymarineDecoder, interface_ip: str,
                 groups: Optional[List[Tuple[str, int]]] = None):
        # BUG FIX: the annotation was `List[Tuple[str, int]] = None`, an
        # invalid implicit-Optional; the default really is None.
        self.decoder = decoder
        self.interface_ip = interface_ip
        self.groups = groups or MULTICAST_GROUPS
        self.sockets: List[socket.socket] = []
        self.running = False
        self.threads: List[threading.Thread] = []

    def _create_socket(self, group: str, port: int) -> Optional[socket.socket]:
        """Create and configure a multicast socket, or None on failure."""
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            # Try SO_REUSEPORT if available (Linux)
            if hasattr(socket, 'SO_REUSEPORT'):
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)

            # Bind to the port on all interfaces
            sock.bind(('', port))

            # Join multicast group on the configured interface
            mreq = struct.pack("4s4s",
                               socket.inet_aton(group),
                               socket.inet_aton(self.interface_ip))
            sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

            # Receive timeout so listener threads can notice shutdown
            sock.settimeout(1.0)

            return sock
        except Exception as e:
            print(f"Error creating socket for {group}:{port}: {e}", file=sys.stderr)
            return None

    def _listener_thread(self, sock: socket.socket, group: str, port: int):
        """Thread function to listen on a single multicast group."""
        while self.running:
            try:
                data, addr = sock.recvfrom(65535)
                self.decoder.decode_packet(data, addr)
            except socket.timeout:
                # Periodic wakeup so the loop re-checks self.running
                continue
            except Exception as e:
                if self.running:
                    print(f"Error receiving on {group}:{port}: {e}", file=sys.stderr)

    def start(self):
        """Start listening on all multicast groups.

        Raises RuntimeError if no socket could be created at all.
        """
        self.running = True

        for group, port in self.groups:
            sock = self._create_socket(group, port)
            if sock:
                self.sockets.append(sock)
                thread = threading.Thread(
                    target=self._listener_thread,
                    args=(sock, group, port),
                    daemon=True
                )
                thread.start()
                self.threads.append(thread)
                print(f"Listening on {group}:{port}")

        if not self.sockets:
            raise RuntimeError("Failed to create any multicast sockets")

    def stop(self):
        """Stop listening and clean up threads and sockets."""
        self.running = False

        # Threads poll self.running at least once per second (recv timeout)
        for thread in self.threads:
            thread.join(timeout=2.0)

        for sock in self.sockets:
            try:
                sock.close()
            except Exception:
                pass

        self.sockets = []
        self.threads = []
|
|
|
|
|
|
class PcapReader:
    """
    Offline packet source: reads a classic pcap file for analysis.

    Only the legacy pcap format is supported (not pcapng).
    """

    PCAP_MAGIC = 0xa1b2c3d4
    PCAP_MAGIC_SWAPPED = 0xd4c3b2a1

    def __init__(self, filename: str):
        self.filename = filename
        self.swapped = False

    def read_packets(self):
        """Yield (timestamp, payload, (src_ip, src_port)) per IPv4/UDP frame."""
        with open(self.filename, 'rb') as fh:
            # Global header: magic + version + tz + sigfigs + snaplen + linktype
            global_header = fh.read(24)
            if len(global_header) < 24:
                raise ValueError("Invalid pcap file: too short")

            magic = struct.unpack('<I', global_header[:4])[0]
            if magic == self.PCAP_MAGIC:
                self.swapped = False
            elif magic == self.PCAP_MAGIC_SWAPPED:
                self.swapped = True
            else:
                raise ValueError(f"Invalid pcap magic: 0x{magic:08x}")

            # Per-record header layout, in the file's byte order
            record_fmt = ('>' if self.swapped else '<') + 'IIII'

            while True:
                record_header = fh.read(16)
                if len(record_header) < 16:
                    break  # clean EOF (or truncated trailer)

                ts_sec, ts_usec, incl_len, _orig_len = struct.unpack(
                    record_fmt, record_header
                )

                frame = fh.read(incl_len)
                if len(frame) < incl_len:
                    break  # truncated capture

                # Need Ethernet (14) + minimal IPv4 (20) + UDP (8) headers
                if len(frame) <= 42:
                    continue
                # EtherType must be IPv4
                if frame[12:14] != b'\x08\x00':
                    continue

                ip_header_len = (frame[14] & 0x0F) * 4
                udp_offset = 14 + ip_header_len
                data_offset = udp_offset + 8
                if data_offset >= len(frame):
                    continue  # no payload after the headers

                # IPv4 source address sits 12 bytes into the IP header
                source_ip = '.'.join(str(octet) for octet in frame[26:30])
                source_port = struct.unpack(
                    '!H', frame[udp_offset:udp_offset + 2]
                )[0]

                yield (ts_sec + ts_usec / 1e6,
                       frame[data_offset:],
                       (source_ip, source_port))
|
|
|
|
|
|
def format_lat_lon(lat: float, lon: float) -> str:
    """Render a lat/lon pair as degrees + decimal minutes with hemisphere letters."""
    def axis(value: float, positive: str, negative: str) -> str:
        # Hemisphere letter comes from the sign; magnitude is split into
        # whole degrees and decimal minutes.
        hemisphere = positive if value >= 0 else negative
        magnitude = abs(value)
        whole_degrees = int(magnitude)
        decimal_minutes = (magnitude - whole_degrees) * 60
        return f"{whole_degrees:3d}° {decimal_minutes:06.3f}' {hemisphere}"

    return f"{axis(lat, 'N', 'S')}, {axis(lon, 'E', 'W')}"
|
|
|
|
|
|
def display_dashboard(sensor_data: SensorData):
    """Display a simple text dashboard of the current sensor state.

    Readings older than 5 seconds are flagged STALE; missing readings
    print "No data".
    """
    now = time.time()

    # ANSI: clear screen and move cursor home
    print("\033[2J\033[H", end="")

    # Header
    timestamp = datetime.now().strftime("%H:%M:%S")
    print("=" * 70)
    print(f" RAYMARINE DECODER {timestamp}")
    print("=" * 70)

    with sensor_data.lock:
        # GPS
        if sensor_data.latitude is not None and sensor_data.longitude is not None:
            age = now - sensor_data.gps_time
            fresh = "OK" if age < 5 else "STALE"
            pos_str = format_lat_lon(sensor_data.latitude, sensor_data.longitude)
            print(f" GPS: {pos_str} [{fresh}]")
        else:
            print(" GPS: No data")

        # Heading
        if sensor_data.heading_deg is not None:
            age = now - sensor_data.heading_time
            fresh = "OK" if age < 5 else "STALE"
            print(f" Heading: {sensor_data.heading_deg:6.1f}° [{fresh}]")
        else:
            print(" Heading: No data")

        # Wind
        if sensor_data.wind_speed_kts is not None:
            age = now - sensor_data.wind_time
            fresh = "OK" if age < 5 else "STALE"
            # BUG FIX: the previous truthiness test hid a valid wind
            # direction of exactly 0° (due north).
            dir_str = f"@ {sensor_data.wind_direction_deg:.0f}°" if sensor_data.wind_direction_deg is not None else ""
            print(f" Wind: {sensor_data.wind_speed_kts:6.1f} kts {dir_str} [{fresh}]")
        else:
            print(" Wind: No data")

        # Depth
        if sensor_data.depth_ft is not None:
            age = now - sensor_data.depth_time
            fresh = "OK" if age < 5 else "STALE"
            depth_m = sensor_data.depth_ft * FEET_TO_METERS
            print(f" Depth: {sensor_data.depth_ft:6.1f} ft ({depth_m:.1f} m) [{fresh}]")
        else:
            print(" Depth: No data")

        # Temperature
        if sensor_data.water_temp_c is not None or sensor_data.air_temp_c is not None:
            # BUG FIX: truthiness tests rendered a legitimate 0.0°C as "---"
            water = f"{sensor_data.water_temp_c:.1f}°C" if sensor_data.water_temp_c is not None else "---"
            air = f"{sensor_data.air_temp_c:.1f}°C" if sensor_data.air_temp_c is not None else "---"
            print(f" Temp: Water: {water} Air: {air}")
        else:
            print(" Temp: No data")

        print("-" * 70)
        uptime = now - sensor_data.start_time
        print(f" Packets: {sensor_data.packet_count} GPS fixes: {sensor_data.gps_count} Uptime: {uptime:.0f}s")

    print("=" * 70)
    print(" Press Ctrl+C to exit")
|
|
|
|
|
|
def output_json(sensor_data: SensorData):
    """Print the current sensor snapshot as pretty-printed JSON."""
    snapshot = sensor_data.to_dict()
    print(json.dumps(snapshot, indent=2))
|
|
|
|
|
|
def main():
    """Command-line entry point: live multicast capture or offline pcap analysis."""
    parser = argparse.ArgumentParser(
        description="Decode Raymarine LightHouse network data",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s -i 198.18.5.5              Live capture with dashboard
  %(prog)s -i 198.18.5.5 --json      Live capture with JSON output
  %(prog)s --pcap capture.pcap       Analyze pcap file

Multicast Groups:
  226.192.206.98:2561   Navigation sensors
  226.192.206.99:2562   Heartbeat/status
  226.192.206.102:2565  Mixed sensor data
  226.192.219.0:3221    Display sync
"""
    )

    parser.add_argument('-i', '--interface',
                        help='Interface IP address for multicast binding')
    parser.add_argument('--pcap',
                        help='Read from pcap file instead of live capture')
    parser.add_argument('--json', action='store_true',
                        help='Output as JSON instead of dashboard')
    parser.add_argument('--json-interval', type=float, default=1.0,
                        help='JSON output interval in seconds (default: 1.0)')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Verbose output')
    parser.add_argument('--group', action='append', nargs=2,
                        metavar=('IP', 'PORT'),
                        help='Additional multicast group to listen on')

    args = parser.parse_args()

    # Validate arguments
    if not args.pcap and not args.interface:
        parser.error("Either --interface or --pcap is required")

    # Initialize sensor data and decoder
    sensor_data = SensorData()
    decoder = RaymarineDecoder(sensor_data, verbose=args.verbose)

    # Add custom groups if specified
    groups = list(MULTICAST_GROUPS)
    if args.group:
        for ip, port in args.group:
            try:
                groups.append((ip, int(port)))
            except ValueError:
                # ROBUSTNESS FIX: a non-numeric port previously raised an
                # unhandled ValueError with a traceback.
                parser.error(f"Invalid port for --group {ip}: {port!r}")

    if args.pcap:
        # Pcap file analysis
        print(f"Reading from {args.pcap}...")
        reader = PcapReader(args.pcap)

        packet_count = 0
        try:
            for ts, data, source in reader.read_packets():
                decoder.decode_packet(data, source)
                packet_count += 1
        except (OSError, ValueError) as e:
            # ROBUSTNESS FIX: report missing/unreadable/invalid capture
            # files cleanly instead of dumping a traceback.
            print(f"Error reading {args.pcap}: {e}", file=sys.stderr)
            sys.exit(1)

        print(f"\nProcessed {packet_count} packets")
        print("\nFinal sensor state:")
        print(json.dumps(sensor_data.to_dict(), indent=2))

    else:
        # Live capture
        listener = MulticastListener(decoder, args.interface, groups)

        try:
            listener.start()
            print(f"\nListening on interface {args.interface}")
            print("Waiting for data...\n")

            while True:
                if args.json:
                    output_json(sensor_data)
                else:
                    display_dashboard(sensor_data)

                time.sleep(args.json_interval if args.json else 0.5)

        except KeyboardInterrupt:
            print("\n\nStopping...")
        finally:
            # Safe even if start() failed before creating any sockets
            listener.stop()

        if not args.json:
            print("\nFinal sensor state:")
            print(json.dumps(sensor_data.to_dict(), indent=2))


if __name__ == "__main__":
    main()
|