Organizes 11 projects for Cerbo GX / Venus OS into a single repository:

- axiom-nmea: Raymarine LightHouse protocol decoder
- dbus-generator-ramp: Generator current ramp controller
- dbus-lightning: Blitzortung lightning monitor
- dbus-meteoblue-forecast: Meteoblue weather forecast
- dbus-no-foreign-land: noforeignland.com tracking
- dbus-tides: Tide prediction from depth + harmonics
- dbus-vrm-history: VRM cloud history proxy
- dbus-windy-station: Windy.com weather upload
- mfd-custom-app: MFD app deployment package
- venus-html5-app: Custom Victron HTML5 app fork
- watermaker: Watermaker PLC control UI

Adds the root README, .gitignore, a project template, and per-project .gitignore files. Sensitive config files are excluded via .gitignore, with .example templates provided.

Made-with: Cursor
256 lines
7.9 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Tank Debug - Dump raw Field 16 entries to find missing IDs.
|
|
"""
|
|
|
|
import struct
|
|
import socket
|
|
import time
|
|
import threading
|
|
|
|
# Protobuf wire types (see the official encoding spec).
WIRE_VARINT = 0   # base-128 varint
WIRE_FIXED64 = 1  # 8-byte little-endian
WIRE_LENGTH = 2   # length-delimited (bytes / string / sub-message)
WIRE_FIXED32 = 5  # 4-byte little-endian

HEADER_SIZE = 20  # transport header bytes preceding the protobuf payload

MULTICAST_GROUPS = [
    ("226.192.206.102", 2565),  # Main sensor data with tanks
]


class ProtobufParser:
    """Schema-less, tolerant protobuf wire-format scanner.

    Walks raw wire-format bytes and collects every length-delimited
    Field 16 entry (tank records in this stream). Truncated or garbled
    input never raises out of the public methods: parsing simply stops
    and whatever was decoded so far is returned.
    """

    def __init__(self, data: bytes):
        self.data = data  # raw protobuf payload (transport header already stripped)
        self.pos = 0      # current read offset into self.data

    def remaining(self) -> int:
        """Return the number of unread bytes left in the buffer."""
        return len(self.data) - self.pos

    def read_varint(self) -> int:
        """Decode a base-128 varint at the current position."""
        result = 0
        shift = 0
        while self.pos < len(self.data):
            byte = self.data[self.pos]
            self.pos += 1
            result |= (byte & 0x7F) << shift
            if not (byte & 0x80):
                break
            shift += 7
        return result

    def read_fixed32(self) -> bytes:
        """Read 4 raw bytes (may be short if the buffer is truncated)."""
        val = self.data[self.pos:self.pos + 4]
        self.pos += 4
        return val

    def read_fixed64(self) -> bytes:
        """Read 8 raw bytes (may be short if the buffer is truncated)."""
        val = self.data[self.pos:self.pos + 8]
        self.pos += 8
        return val

    def read_length_delimited(self) -> bytes:
        """Read a varint length prefix followed by that many bytes."""
        length = self.read_varint()
        val = self.data[self.pos:self.pos + length]
        self.pos += length
        return val

    @staticmethod
    def _decode_varint(data: bytes, pos: int):
        """Decode a varint in *data* starting at *pos*; return (value, new_pos)."""
        val = 0
        shift = 0
        while pos < len(data):
            byte = data[pos]
            pos += 1
            val |= (byte & 0x7F) << shift
            if not (byte & 0x80):
                break
            shift += 7
        return val, pos

    def parse_all_field16(self) -> list:
        """Parse and collect ALL Field 16 entries with full detail.

        Returns a list of dicts as produced by parse_tank_entry(), each
        augmented with 'raw_hex' and 'raw_len' of the entry payload.
        """
        entries = []

        while self.pos < len(self.data):
            if self.remaining() < 1:
                break
            try:
                tag = self.read_varint()
                field_num = tag >> 3
                wire_type = tag & 0x07

                # An implausible field number means we lost sync; stop.
                if field_num == 0 or field_num > 1000:
                    break

                if wire_type == WIRE_VARINT:
                    value = self.read_varint()
                elif wire_type == WIRE_FIXED64:
                    value = self.read_fixed64()
                elif wire_type == WIRE_LENGTH:
                    value = self.read_length_delimited()
                elif wire_type == WIRE_FIXED32:
                    value = self.read_fixed32()
                else:
                    break  # unknown wire type (e.g. deprecated groups 3/4)

                # If this is Field 16, parse its contents in detail
                if field_num == 16 and wire_type == WIRE_LENGTH:
                    entry = self.parse_tank_entry(value)
                    entry['raw_hex'] = value.hex()
                    entry['raw_len'] = len(value)
                    entries.append(entry)

            except Exception:
                # Truncated/garbled tail: keep what was decoded so far.
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                break

        return entries

    def parse_tank_entry(self, data: bytes) -> dict:
        """Parse a single tank entry and return all fields.

        Result: {'fields': {field_num: (type_name, value[, hex]), ...}},
        plus a 'parse_error' key if decoding aborted early. A repeated
        field number keeps only its last occurrence (fine for debugging).
        """
        entry = {'fields': {}}
        pos = 0

        while pos < len(data):
            try:
                # Tag is itself a varint (handles multi-byte field numbers).
                tag, pos = self._decode_varint(data, pos)
                field_num = tag >> 3
                wire_type = tag & 0x07

                if field_num == 0 or field_num > 100:
                    break  # lost sync

                if wire_type == WIRE_VARINT:
                    val, pos = self._decode_varint(data, pos)
                    entry['fields'][field_num] = ('varint', val)

                elif wire_type == WIRE_FIXED32:
                    raw = data[pos:pos + 4]
                    pos += 4
                    try:
                        f = struct.unpack('<f', raw)[0]
                        entry['fields'][field_num] = ('float', f, raw.hex())
                    except struct.error:
                        # Short read near the end of the buffer.
                        entry['fields'][field_num] = ('fixed32', raw.hex())

                elif wire_type == WIRE_FIXED64:
                    raw = data[pos:pos + 8]
                    pos += 8
                    try:
                        d = struct.unpack('<d', raw)[0]
                        entry['fields'][field_num] = ('double', d, raw.hex())
                    except struct.error:
                        entry['fields'][field_num] = ('fixed64', raw.hex())

                elif wire_type == WIRE_LENGTH:
                    length, pos = self._decode_varint(data, pos)
                    raw = data[pos:pos + length]
                    pos += length
                    # Keep only a 40-hex-char preview of long payloads.
                    entry['fields'][field_num] = ('bytes', len(raw), raw.hex()[:40])

                else:
                    break  # unknown wire type

            except Exception as e:
                entry['parse_error'] = str(e)
                break

        return entry
|
|
|
|
|
|
def scan_packet(data: bytes):
    """Scan one UDP packet and print every Field 16 (tank) entry in it.

    Skips packets too small to hold the 20-byte transport header plus a
    minimal protobuf payload. Output goes to stdout only; nothing is
    returned.
    """
    if len(data) < HEADER_SIZE + 5:
        return

    # The protobuf payload follows the fixed-size transport header.
    proto_data = data[HEADER_SIZE:]
    parser = ProtobufParser(proto_data)
    entries = parser.parse_all_field16()
    if not entries:
        return

    print(f"\n{'='*70}")
    print(f"Packet size: {len(data)} bytes, Found {len(entries)} tank entries")
    print(f"{'='*70}")

    for i, entry in enumerate(entries):
        fields = entry['fields']

        # Known fields: the decoded value sits at index 1 of each
        # (type_name, value[, hex]) tuple.  (The old
        # `fields.get(k, default)[1] if k in fields` made the default dead.)
        tank_id = fields[1][1] if 1 in fields else None
        status = fields[2][1] if 2 in fields else None
        level = fields[3][1] if 3 in fields else None

        print(f"\n Entry {i+1}: (raw length: {entry['raw_len']} bytes)")
        print(f" Tank ID (field 1): {tank_id}")
        print(f" Status (field 2): {status}")
        print(f" Level (field 3): {level}")
        print(f" Raw hex: {entry['raw_hex'][:60]}{'...' if len(entry['raw_hex']) > 60 else ''}")
        print(f" All fields present: {sorted(fields.keys())}")

        # Show any extra fields
        for fn, fv in sorted(fields.items()):
            if fn not in (1, 2, 3):
                print(f" Field {fn}: {fv}")
|
|
|
def main():
    """Join the sensor multicast group and dump tank entries for a while.

    Requires -i/--interface (IP of the local interface used to join the
    group). Each distinct packet *size* is processed only once, to keep
    the debug output readable across repeated identical broadcasts.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Debug tank entries")
    parser.add_argument('-i', '--interface', required=True, help='Interface IP')
    parser.add_argument('-t', '--time', type=int, default=5, help='Capture time (seconds)')
    args = parser.parse_args()

    print(f"Capturing tank data for {args.time} seconds...")

    # Single source of truth for group/port (was hard-coded literals that
    # duplicated MULTICAST_GROUPS).
    group, port = MULTICAST_GROUPS[0]

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('', port))
    mreq = struct.pack("4s4s", socket.inet_aton(group), socket.inet_aton(args.interface))
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    sock.settimeout(1.0)  # 1s timeout so the time-limit loop stays responsive

    seen_sizes = set()
    end_time = time.time() + args.time

    try:
        while time.time() < end_time:
            try:
                data, _ = sock.recvfrom(65535)
            except socket.timeout:
                continue
            # Only process each unique packet size once
            if len(data) not in seen_sizes:
                seen_sizes.add(len(data))
                scan_packet(data)
    except KeyboardInterrupt:
        pass
    finally:
        sock.close()  # closing also drops the multicast membership

    print("\n\nDone.")
|
|
|
|
|
|
# Run the capture tool only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|