sfm first build
This commit is contained in:
19
minimateplus/__init__.py
Normal file
19
minimateplus/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""
|
||||
minimateplus — Instantel MiniMate Plus protocol library.
|
||||
|
||||
Provides a clean Python API for communicating with MiniMate Plus seismographs
|
||||
over RS-232 serial (direct cable) or TCP (via RV50 cellular modem bridge).
|
||||
|
||||
Typical usage:
|
||||
from minimateplus import MiniMateClient
|
||||
|
||||
with MiniMateClient("COM5") as device:
|
||||
info = device.connect()
|
||||
events = device.get_events()
|
||||
"""
|
||||
|
||||
from .client import MiniMateClient
|
||||
from .models import DeviceInfo, Event
|
||||
|
||||
__version__ = "0.1.0"
|
||||
__all__ = ["MiniMateClient", "DeviceInfo", "Event"]
|
||||
477
minimateplus/client.py
Normal file
477
minimateplus/client.py
Normal file
@@ -0,0 +1,477 @@
|
||||
"""
|
||||
client.py — MiniMateClient: the top-level public API for the library.
|
||||
|
||||
Combines transport, protocol, and model decoding into a single easy-to-use
|
||||
class. This is the only layer that the SFM server (sfm/server.py) imports
|
||||
directly.
|
||||
|
||||
Design: stateless per-call (connect → do work → disconnect).
|
||||
The client does not hold an open connection between calls. This keeps the
|
||||
first implementation simple and matches Blastware's observed behaviour.
|
||||
Persistent connections can be added later without changing the public API.
|
||||
|
||||
Example:
|
||||
from minimateplus import MiniMateClient
|
||||
|
||||
with MiniMateClient("COM5") as device:
|
||||
info = device.connect() # POLL handshake + identity read
|
||||
events = device.get_events() # download all events
|
||||
print(info)
|
||||
for ev in events:
|
||||
print(ev)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import struct
|
||||
from typing import Optional
|
||||
|
||||
from .framing import S3Frame
|
||||
from .models import (
|
||||
DeviceInfo,
|
||||
Event,
|
||||
PeakValues,
|
||||
ProjectInfo,
|
||||
Timestamp,
|
||||
)
|
||||
from .protocol import MiniMateProtocol, ProtocolError
|
||||
from .protocol import (
|
||||
SUB_SERIAL_NUMBER,
|
||||
SUB_FULL_CONFIG,
|
||||
SUB_EVENT_INDEX,
|
||||
SUB_EVENT_HEADER,
|
||||
SUB_WAVEFORM_RECORD,
|
||||
)
|
||||
from .transport import SerialTransport, BaseTransport
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ── MiniMateClient ────────────────────────────────────────────────────────────
|
||||
|
||||
class MiniMateClient:
    """
    High-level client for a single MiniMate Plus device.

    Args:
        port: Serial port name (e.g. "COM5", "/dev/ttyUSB0").
        baud: Baud rate (default 38400).
        timeout: Per-request receive timeout in seconds (default 5.0).
        transport: Optional pre-built transport (for testing / TCP future use).
            If None, a SerialTransport is constructed from port/baud.
    """

    def __init__(
        self,
        port: str,
        baud: int = 38_400,
        timeout: float = 5.0,
        transport: Optional[BaseTransport] = None,
    ) -> None:
        self.port = port
        self.baud = baud
        self.timeout = timeout
        # Injected transport (tests / future TCP) wins; otherwise a
        # SerialTransport is built lazily in open().
        self._transport: Optional[BaseTransport] = transport
        # Protocol wrapper — recreated on every open(), dropped on close().
        self._proto: Optional[MiniMateProtocol] = None

    # ── Connection lifecycle ──────────────────────────────────────────────────

    def open(self) -> None:
        """Open the transport connection."""
        if self._transport is None:
            self._transport = SerialTransport(self.port, self.baud)
        if not self._transport.is_connected:
            self._transport.connect()
        self._proto = MiniMateProtocol(self._transport, recv_timeout=self.timeout)

    def close(self) -> None:
        """Close the transport connection."""
        if self._transport and self._transport.is_connected:
            self._transport.disconnect()
        self._proto = None

    @property
    def is_open(self) -> bool:
        # True only when a transport exists AND reports itself connected.
        return bool(self._transport and self._transport.is_connected)

    # ── Context manager ───────────────────────────────────────────────────────

    def __enter__(self) -> "MiniMateClient":
        self.open()
        return self

    def __exit__(self, *_) -> None:
        # Exceptions are never suppressed — __exit__ implicitly returns None.
        self.close()

    # ── Public API ────────────────────────────────────────────────────────────

    def connect(self) -> DeviceInfo:
        """
        Perform the startup handshake and read device identity.

        Opens the connection if not already open.

        Reads:
            1. POLL handshake (startup)
            2. SUB 15 — serial number
            3. SUB 01 — full config block (firmware, model strings)

        Returns:
            Populated DeviceInfo.

        Raises:
            ProtocolError: on any communication failure.
        """
        if not self.is_open:
            self.open()

        proto = self._require_proto()

        log.info("connect: POLL startup")
        proto.startup()

        log.info("connect: reading serial number (SUB 15)")
        sn_data = proto.read(SUB_SERIAL_NUMBER)
        device_info = _decode_serial_number(sn_data)

        log.info("connect: reading full config (SUB 01)")
        cfg_data = proto.read(SUB_FULL_CONFIG)
        # Full-config fields are merged into the DeviceInfo built from SUB 15.
        _decode_full_config_into(cfg_data, device_info)

        log.info("connect: %s", device_info)
        return device_info

    def get_events(self, include_waveforms: bool = True) -> list[Event]:
        """
        Download all stored events from the device.

        For each event in the index:
            1. SUB 1E — event header (timestamp, sample rate)
            2. SUB 0C — full waveform record (peak values, project strings)

        Raw ADC waveform samples (SUB 5A bulk stream) are NOT downloaded
        here — they can be large. Pass include_waveforms=True to also
        download them (not yet implemented, reserved for a future call).

        Args:
            include_waveforms: Reserved. Currently ignored.

        Returns:
            List of Event objects, one per stored record on the device.

        Raises:
            ProtocolError: on any communication failure.
        """
        proto = self._require_proto()

        log.info("get_events: reading event index (SUB 08)")
        index_data = proto.read(SUB_EVENT_INDEX)
        event_count = _decode_event_count(index_data)
        log.info("get_events: %d event(s) found", event_count)

        events: list[Event] = []
        for i in range(event_count):
            log.info("get_events: downloading event %d/%d", i + 1, event_count)
            ev = self._download_event(proto, i)
            # _download_event only returns None on header failure paths it
            # chooses not to salvage; partial events are still collected.
            if ev:
                events.append(ev)

        return events

    # ── Internal helpers ──────────────────────────────────────────────────────

    def _require_proto(self) -> MiniMateProtocol:
        # Guard for all public calls: protocol exists only between open()/close().
        if self._proto is None:
            raise RuntimeError("MiniMateClient is not connected. Call open() first.")
        return self._proto

    def _download_event(
        self, proto: MiniMateProtocol, index: int
    ) -> Optional[Event]:
        """Download header + waveform record for one event by index."""
        ev = Event(index=index)

        # SUB 1E — event header (timestamp, sample rate).
        #
        # The two-step event-header read passes the event index at payload[5]
        # of the data-request frame (consistent with all other reads).
        # This limits addressing to events 0–255 without a multi-byte scheme;
        # the MiniMate Plus stores up to ~1000 events, so high indices may need
        # a revised approach once we have captured event-download frames.
        #
        # NOTE(review): this bypasses proto.read() and drives the protocol's
        # private _send/_recv_one directly so step 2 can carry the event index
        # instead of the usual DATA_LEN offset.
        try:
            from .framing import build_bw_frame
            from .protocol import _expected_rsp_sub, SUB_EVENT_HEADER

            # Step 1 — probe (offset=0)
            probe_frame = build_bw_frame(SUB_EVENT_HEADER, 0)
            proto._send(probe_frame)
            _probe_rsp = proto._recv_one(expected_sub=_expected_rsp_sub(SUB_EVENT_HEADER))

            # Step 2 — data request (offset = event index, clamped to 0xFF)
            event_offset = min(index, 0xFF)
            data_frame = build_bw_frame(SUB_EVENT_HEADER, event_offset)
            proto._send(data_frame)
            data_rsp = proto._recv_one(expected_sub=_expected_rsp_sub(SUB_EVENT_HEADER))

            _decode_event_header_into(data_rsp.data, ev)
        except ProtocolError as exc:
            log.warning("event %d: header read failed: %s", index, exc)
            return ev  # Return partial event rather than losing it entirely

        # SUB 0C — full waveform record (peak values, project strings).
        try:
            wf_data = proto.read(SUB_WAVEFORM_RECORD)
            _decode_waveform_record_into(wf_data, ev)
        except ProtocolError as exc:
            log.warning("event %d: waveform record read failed: %s", index, exc)

        return ev
|
||||
|
||||
|
||||
# ── Decoder functions ─────────────────────────────────────────────────────────
|
||||
#
|
||||
# Pure functions: bytes → model field population.
|
||||
# Kept here (not in models.py) to isolate protocol knowledge from data shapes.
|
||||
|
||||
def _decode_serial_number(data: bytes) -> DeviceInfo:
    """
    Decode SUB EA (SERIAL_NUMBER_RESPONSE) payload into a new DeviceInfo.

    Layout (10 bytes total per §7.2):
        bytes 0–7: serial string, null-terminated, null-padded ("BE18189\\x00")
        byte 8:    unit-specific trailing byte (purpose unknown ❓)
        byte 9:    firmware minor version (0x11 = 17) ✅

    Returns:
        New DeviceInfo with serial, firmware_minor, serial_trail_0 populated.
    """
    def _ascii(raw: bytes) -> str:
        # Strip null padding, tolerate non-ASCII bytes from corrupt payloads.
        return raw.rstrip(b"\x00").decode("ascii", errors="replace")

    if len(data) < 9:
        # Payload shorter than spec — degrade gracefully with what we have.
        return DeviceInfo(serial=_ascii(data), firmware_minor=0)

    return DeviceInfo(
        serial=_ascii(data[:8]),
        firmware_minor=data[9] if len(data) > 9 else 0,
        serial_trail_0=data[8],
    )
|
||||
|
||||
|
||||
def _decode_full_config_into(data: bytes, info: DeviceInfo) -> None:
|
||||
"""
|
||||
Decode SUB FE (FULL_CONFIG_RESPONSE) payload into an existing DeviceInfo.
|
||||
|
||||
The FE response arrives as a composite S3 outer frame whose data section
|
||||
contains inner DLE-framed sub-frames. Because of this nesting the §7.3
|
||||
fixed offsets (0x34, 0x3C, 0x44, 0x6D) are unreliable — they assume a
|
||||
clean non-nested payload starting at byte 0.
|
||||
|
||||
Instead we search the whole byte array for known ASCII patterns. The
|
||||
strings are long enough to be unique in any reasonable payload.
|
||||
|
||||
Modifies info in-place.
|
||||
"""
|
||||
def _extract(needle: bytes, max_len: int = 32) -> Optional[str]:
|
||||
"""Return the null-terminated ASCII string that starts with *needle*."""
|
||||
pos = data.find(needle)
|
||||
if pos < 0:
|
||||
return None
|
||||
end = pos
|
||||
while end < len(data) and data[end] != 0 and (end - pos) < max_len:
|
||||
end += 1
|
||||
s = data[pos:end].decode("ascii", errors="replace").strip()
|
||||
return s or None
|
||||
|
||||
# ── Manufacturer and model are straightforward literal matches ────────────
|
||||
info.manufacturer = _extract(b"Instantel")
|
||||
info.model = _extract(b"MiniMate Plus")
|
||||
|
||||
# ── Firmware version: "S3xx.xx" — scan for the 'S3' prefix ───────────────
|
||||
for i in range(len(data) - 5):
|
||||
if data[i] == ord('S') and data[i + 1] == ord('3') and chr(data[i + 2]).isdigit():
|
||||
end = i
|
||||
while end < len(data) and data[end] not in (0, 0x20) and (end - i) < 12:
|
||||
end += 1
|
||||
candidate = data[i:end].decode("ascii", errors="replace").strip()
|
||||
if "." in candidate and len(candidate) >= 5:
|
||||
info.firmware_version = candidate
|
||||
break
|
||||
|
||||
# ── DSP version: numeric "xx.xx" — search for known prefixes ─────────────
|
||||
for prefix in (b"10.", b"11.", b"12.", b"9.", b"8."):
|
||||
pos = data.find(prefix)
|
||||
if pos < 0:
|
||||
continue
|
||||
end = pos
|
||||
while end < len(data) and data[end] not in (0, 0x20) and (end - pos) < 8:
|
||||
end += 1
|
||||
candidate = data[pos:end].decode("ascii", errors="replace").strip()
|
||||
# Accept only strings that look like "digits.digits"
|
||||
if "." in candidate and all(c in "0123456789." for c in candidate):
|
||||
info.dsp_version = candidate
|
||||
break
|
||||
|
||||
|
||||
def _decode_event_count(data: bytes) -> int:
|
||||
"""
|
||||
Extract stored event count from SUB F7 (EVENT_INDEX_RESPONSE) payload.
|
||||
|
||||
Layout per §7.4 (offsets from data section start):
|
||||
+00: 00 58 09 — total index size or record count ❓
|
||||
+03: 00 00 00 01 — possibly stored event count = 1 ❓
|
||||
|
||||
We use bytes +03..+06 interpreted as uint32 BE as the event count.
|
||||
This is inferred (🔶) — the exact meaning of the first 3 bytes is unclear.
|
||||
"""
|
||||
if len(data) < 7:
|
||||
log.warning("event index payload too short (%d bytes), assuming 0 events", len(data))
|
||||
return 0
|
||||
|
||||
# Try the uint32 at +3 first
|
||||
count = struct.unpack_from(">I", data, 3)[0]
|
||||
|
||||
# Sanity check: MiniMate Plus manual says max ~1000 events
|
||||
if count > 1000:
|
||||
log.warning(
|
||||
"event count %d looks unreasonably large — clamping to 0", count
|
||||
)
|
||||
return 0
|
||||
|
||||
return count
|
||||
|
||||
|
||||
def _decode_event_header_into(data: bytes, event: Event) -> None:
    """
    Decode SUB E1 (EVENT_HEADER_RESPONSE) into an existing Event.

    The 6-byte timestamp sits at the start of the data payload.
    Sample rate location is not yet confirmed — left as None for now.

    Modifies event in-place; decode problems are logged, never raised.
    """
    header = data[:6]
    if len(header) == 6:
        try:
            event.timestamp = Timestamp.from_bytes(header)
        except ValueError as exc:
            log.warning("event header timestamp decode failed: %s", exc)
    else:
        log.warning("event header payload too short (%d bytes)", len(data))
|
||||
|
||||
|
||||
def _decode_waveform_record_into(data: bytes, event: Event) -> None:
    """
    Decode SUB F3 (FULL_WAVEFORM_RECORD) data into an existing Event.

    Peak values are stored as IEEE 754 big-endian floats; the exact field
    layout is only partially confirmed, so both decoders below scan for
    recognisable patterns rather than trusting fixed offsets.

    Modifies event in-place. Each sub-decode is best-effort: failures are
    logged and the other sub-decode still runs.
    """
    # Peak values — heuristic 4×float32 scan (offsets 🔶 INFERRED from captures).
    try:
        peaks = _extract_peak_floats(data)
    except Exception as exc:
        log.warning("waveform record peak decode failed: %s", exc)
    else:
        if peaks:
            event.peak_values = peaks

    # Project strings — search for known ASCII labels.
    try:
        project = _extract_project_strings(data)
    except Exception as exc:
        log.warning("waveform record project strings decode failed: %s", exc)
    else:
        if project:
            event.project_info = project
|
||||
|
||||
|
||||
def _extract_peak_floats(data: bytes) -> Optional[PeakValues]:
|
||||
"""
|
||||
Scan the waveform record payload for four sequential float32 BE values
|
||||
corresponding to Tran, Vert, Long, MicL peak values.
|
||||
|
||||
The exact offset is not confirmed (🔶), so we do a heuristic scan:
|
||||
look for four consecutive 4-byte groups where each decodes as a
|
||||
plausible PPV value (0 < v < 100 in/s or psi).
|
||||
|
||||
Returns PeakValues if a plausible group is found, else None.
|
||||
"""
|
||||
# Require at least 16 bytes for 4 floats
|
||||
if len(data) < 16:
|
||||
return None
|
||||
|
||||
for start in range(0, len(data) - 15, 4):
|
||||
try:
|
||||
vals = struct.unpack_from(">4f", data, start)
|
||||
except struct.error:
|
||||
continue
|
||||
|
||||
# All four values should be non-negative and within plausible PPV range
|
||||
if all(0.0 <= v < 100.0 for v in vals):
|
||||
tran, vert, long_, micl = vals
|
||||
# MicL (psi) is typically much smaller than geo values
|
||||
# Simple sanity: at least two non-zero values
|
||||
if sum(v > 0 for v in vals) >= 2:
|
||||
log.debug(
|
||||
"peak floats at offset %d: T=%.4f V=%.4f L=%.4f M=%.6f",
|
||||
start, tran, vert, long_, micl
|
||||
)
|
||||
return PeakValues(
|
||||
tran=tran, vert=vert, long=long_, micl=micl
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
def _extract_project_strings(data: bytes) -> Optional[ProjectInfo]:
|
||||
"""
|
||||
Search the waveform record payload for known ASCII label strings
|
||||
("Project:", "Client:", "User Name:", "Seis Loc:", "Extended Notes")
|
||||
and extract the associated value strings that follow them.
|
||||
|
||||
Layout (per §7.5): each entry is [label ~16 bytes][value ~32 bytes],
|
||||
null-padded. We find the label, then read the next non-null chars.
|
||||
"""
|
||||
def _find_string_after(needle: bytes, max_value_len: int = 64) -> Optional[str]:
|
||||
pos = data.find(needle)
|
||||
if pos < 0:
|
||||
return None
|
||||
# Skip the label (including null padding) until we find a non-null value
|
||||
# The value starts at pos+len(needle), but may have a gap of null bytes
|
||||
value_start = pos + len(needle)
|
||||
# Skip nulls
|
||||
while value_start < len(data) and data[value_start] == 0:
|
||||
value_start += 1
|
||||
if value_start >= len(data):
|
||||
return None
|
||||
# Read until null terminator or max_value_len
|
||||
end = value_start
|
||||
while end < len(data) and data[end] != 0 and (end - value_start) < max_value_len:
|
||||
end += 1
|
||||
value = data[value_start:end].decode("ascii", errors="replace").strip()
|
||||
return value or None
|
||||
|
||||
project = _find_string_after(b"Project:")
|
||||
client = _find_string_after(b"Client:")
|
||||
operator = _find_string_after(b"User Name:")
|
||||
location = _find_string_after(b"Seis Loc:")
|
||||
notes = _find_string_after(b"Extended Notes")
|
||||
|
||||
if not any([project, client, operator, location, notes]):
|
||||
return None
|
||||
|
||||
return ProjectInfo(
|
||||
project=project,
|
||||
client=client,
|
||||
operator=operator,
|
||||
sensor_location=location,
|
||||
notes=notes,
|
||||
)
|
||||
262
minimateplus/framing.py
Normal file
262
minimateplus/framing.py
Normal file
@@ -0,0 +1,262 @@
|
||||
"""
|
||||
framing.py — DLE frame codec for the Instantel MiniMate Plus RS-232 protocol.
|
||||
|
||||
Wire format:
|
||||
BW→S3 (our requests): [ACK=0x41] [STX=0x02] [stuffed payload+chk] [ETX=0x03]
|
||||
S3→BW (device replies): [DLE=0x10] [STX=0x02] [stuffed payload+chk] [DLE=0x10] [ETX=0x03]
|
||||
|
||||
The ACK 0x41 byte often precedes S3 frames too — it is silently discarded
|
||||
by the streaming parser.
|
||||
|
||||
De-stuffed payload layout:
|
||||
BW→S3 request frame:
|
||||
[0] CMD 0x10 (BW request marker)
|
||||
[1] flags 0x00
|
||||
[2] SUB command sub-byte
|
||||
[3] 0x00 always zero in captured frames
|
||||
[4] 0x00 always zero in captured frames
|
||||
[5] OFFSET two-step offset: 0x00 = length-probe, DATA_LEN = data-request
|
||||
[6-15] zero padding (total de-stuffed payload = 16 bytes)
|
||||
|
||||
S3→BW response frame:
|
||||
[0] CMD 0x00 (S3 response marker)
|
||||
[1] flags 0x10
|
||||
[2] SUB response sub-byte (= 0xFF - request SUB)
|
||||
[3] PAGE_HI high byte of page address (always 0x00 in observed frames)
|
||||
[4] PAGE_LO low byte (always 0x00 in observed frames)
|
||||
[5+] data payload data section (composite inner frames for large responses)
|
||||
|
||||
DLE stuffing rule: any 0x10 byte in the payload is doubled on the wire (0x10 → 0x10 0x10).
|
||||
This applies to the checksum byte too.
|
||||
|
||||
Confirmed from live captures (s3_parser.py validation + raw_bw.bin / raw_s3.bin).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
# ── Protocol byte constants ───────────────────────────────────────────────────
|
||||
|
||||
DLE = 0x10 # Data Link Escape
|
||||
STX = 0x02 # Start of text
|
||||
ETX = 0x03 # End of text
|
||||
ACK = 0x41 # Acknowledgement / frame-start marker (BW side)
|
||||
|
||||
BW_CMD = 0x10 # CMD byte value in BW→S3 frames
|
||||
S3_CMD = 0x00 # CMD byte value in S3→BW frames
|
||||
S3_FLAGS = 0x10 # flags byte value in S3→BW frames
|
||||
|
||||
# BW read-command payload size: 5 header bytes + 11 padding bytes = 16 total.
|
||||
# Confirmed from captured raw_bw.bin: all read-command frames carry exactly 16
|
||||
# de-stuffed bytes (excluding the appended checksum).
|
||||
_BW_PAYLOAD_SIZE = 16
|
||||
|
||||
|
||||
# ── DLE stuffing / de-stuffing ────────────────────────────────────────────────
|
||||
|
||||
def dle_stuff(data: bytes) -> bytes:
    """Escape literal 0x10 bytes: 0x10 → 0x10 0x10."""
    # bytes.replace doubles every DLE in a single C-level pass — identical
    # result to the byte-at-a-time loop.
    return data.replace(b"\x10", b"\x10\x10")
|
||||
|
||||
|
||||
def dle_unstuff(data: bytes) -> bytes:
    """Remove DLE stuffing: 0x10 0x10 → 0x10."""
    # bytes.replace scans left-to-right over non-overlapping matches, which
    # is exactly the stuffing state machine: each DLE pair collapses to one
    # literal DLE, and a lone trailing DLE passes through unchanged.
    return data.replace(b"\x10\x10", b"\x10")
|
||||
|
||||
|
||||
# ── Checksum ─────────────────────────────────────────────────────────────────
|
||||
|
||||
def checksum(payload: bytes) -> int:
    """SUM8: sum of all de-stuffed payload bytes, mod 256."""
    total = 0
    for value in payload:
        total = (total + value) & 0xFF
    return total
|
||||
|
||||
|
||||
# ── BW→S3 frame builder ───────────────────────────────────────────────────────
|
||||
|
||||
def build_bw_frame(sub: int, offset: int = 0) -> bytes:
    """
    Build a BW→S3 read-command frame.

    The de-stuffed payload is always 16 bytes:
        [BW_CMD, 0x00, sub, 0x00, 0x00, offset, 0x00 × 10]

    Confirmed from BW capture analysis: payload[3] and payload[4] are always
    0x00 across all observed read commands. The two-step offset lives at
    payload[5]: 0x00 for the length-probe step, DATA_LEN for the data-fetch step.

    Wire output: [ACK] [STX] dle_stuff(payload + checksum) [ETX]

    Args:
        sub: SUB command byte (e.g. 0x01 = FULL_CONFIG_READ)
        offset: Value placed at payload[5].
            Pass 0 for the probe step; pass DATA_LENGTHS[sub] for the data step.

    Returns:
        Complete frame bytes ready to write to the serial port / socket.
    """
    # Six meaningful bytes, null-padded out to the fixed 16-byte payload.
    header = bytes([BW_CMD, 0x00, sub, 0x00, 0x00, offset])
    payload = header.ljust(_BW_PAYLOAD_SIZE, b"\x00")
    # Checksum is computed over the de-stuffed payload, then stuffed with it.
    body = payload + bytes([checksum(payload)])
    return bytes([ACK, STX]) + dle_stuff(body) + bytes([ETX])
|
||||
|
||||
|
||||
# ── Pre-built POLL frames ─────────────────────────────────────────────────────
#
# POLL (SUB 0x5B) uses the same two-step pattern as all other reads — the
# hardcoded length 0x30 lives at payload[5], exactly as in build_bw_frame().
# Built once at import time; frames are immutable bytes, safe to reuse.

POLL_PROBE = build_bw_frame(0x5B, 0x00)  # length-probe POLL (offset = 0)
POLL_DATA = build_bw_frame(0x5B, 0x30)  # data-request POLL (offset = 0x30)
|
||||
|
||||
|
||||
# ── S3 response dataclass ─────────────────────────────────────────────────────
|
||||
|
||||
@dataclass
class S3Frame:
    """A fully parsed and de-stuffed S3→BW response frame."""

    sub: int              # response SUB byte (e.g. 0xA4 = POLL_RESPONSE)
    page_hi: int          # PAGE_HI from header (= data length on step-2 length response)
    page_lo: int          # PAGE_LO from header
    data: bytes           # payload data section (payload[5:], checksum already stripped)
    checksum_valid: bool  # True when the received SUM8 matched the computed one

    @property
    def page_key(self) -> int:
        """Combined 16-bit page address / length: (page_hi << 8) | page_lo."""
        hi, lo = self.page_hi, self.page_lo
        return (hi << 8) | lo
|
||||
|
||||
|
||||
# ── Streaming S3 frame parser ─────────────────────────────────────────────────
|
||||
|
||||
class S3FrameParser:
    """
    Incremental byte-stream parser for S3→BW response frames.

    Feed incoming bytes with feed(). Complete, valid frames are returned
    immediately and also accumulated in self.frames.

    State machine:
        IDLE         — scanning for DLE (0x10)
        SEEN_DLE     — saw DLE, waiting for STX (0x02) to start a frame
        IN_FRAME     — collecting de-stuffed payload bytes
        IN_FRAME_DLE — inside frame, saw DLE; ETX ends frame, DLE continues stuffing

    ACK (0x41) bytes and arbitrary non-DLE bytes in IDLE state are silently
    discarded (covers device boot string "Operating System" and keepalive ACKs).
    """

    # State constants — plain ints keep the per-byte hot path cheap.
    _IDLE = 0
    _SEEN_DLE = 1
    _IN_FRAME = 2
    _IN_FRAME_DLE = 3

    def __init__(self) -> None:
        self._state = self._IDLE
        self._body = bytearray()  # accumulates de-stuffed frame bytes
        self.frames: list[S3Frame] = []  # all frames completed so far

    def reset(self) -> None:
        # Discard any partially collected frame and return to scanning.
        self._state = self._IDLE
        self._body.clear()

    def feed(self, data: bytes) -> list[S3Frame]:
        """
        Process a chunk of incoming bytes.

        Returns a list of S3Frame objects completed during this call.
        All completed frames are also appended to self.frames.
        """
        completed: list[S3Frame] = []
        for b in data:
            frame = self._step(b)
            if frame is not None:
                completed.append(frame)
                self.frames.append(frame)
        return completed

    def _step(self, b: int) -> Optional[S3Frame]:
        """Process one byte. Returns a completed S3Frame or None."""

        if self._state == self._IDLE:
            if b == DLE:
                self._state = self._SEEN_DLE
            # ACK, boot strings, garbage — silently ignored

        elif self._state == self._SEEN_DLE:
            if b == STX:
                # Frame start: begin collecting a fresh body.
                self._body.clear()
                self._state = self._IN_FRAME
            else:
                # Stray DLE not followed by STX — back to idle
                self._state = self._IDLE

        elif self._state == self._IN_FRAME:
            if b == DLE:
                # Could be stuffing (DLE DLE) or frame end (DLE ETX) —
                # decided by the next byte in _IN_FRAME_DLE.
                self._state = self._IN_FRAME_DLE
            else:
                self._body.append(b)

        elif self._state == self._IN_FRAME_DLE:
            if b == DLE:
                # DLE DLE → literal 0x10 in payload
                self._body.append(DLE)
                self._state = self._IN_FRAME
            elif b == ETX:
                # End of frame
                frame = self._finalise()
                self._state = self._IDLE
                return frame
            else:
                # Unexpected DLE + byte — treat both as literal data and continue
                self._body.append(DLE)
                self._body.append(b)
                self._state = self._IN_FRAME

        return None

    def _finalise(self) -> Optional[S3Frame]:
        """
        Called when DLE+ETX is seen. Validates checksum and builds S3Frame.
        Returns None if the frame is too short or structurally invalid.
        """
        body = bytes(self._body)

        # Minimum valid frame: 5-byte header + at least 1 checksum byte = 6
        if len(body) < 6:
            return None

        raw_payload = body[:-1]  # everything except the trailing checksum byte
        chk_received = body[-1]
        chk_computed = checksum(raw_payload)

        if len(raw_payload) < 5:
            return None

        # Validate CMD byte — we only accept S3→BW response frames here
        if raw_payload[0] != S3_CMD:
            return None

        # A bad checksum still yields a frame — the caller decides what to do
        # with checksum_valid=False.
        return S3Frame(
            sub = raw_payload[2],
            page_hi = raw_payload[3],
            page_lo = raw_payload[4],
            data = raw_payload[5:],
            checksum_valid = (chk_received == chk_computed),
        )
|
||||
215
minimateplus/models.py
Normal file
215
minimateplus/models.py
Normal file
@@ -0,0 +1,215 @@
|
||||
"""
|
||||
models.py — Plain-Python data models for the MiniMate Plus protocol library.
|
||||
|
||||
All models are intentionally simple dataclasses with no protocol logic.
|
||||
They represent *decoded* device data — the client layer translates raw frame
|
||||
bytes into these objects, and the SFM API layer serialises them to JSON.
|
||||
|
||||
Notes on certainty:
|
||||
Fields marked ✅ are confirmed from captured data.
|
||||
Fields marked 🔶 are strongly inferred but not formally proven.
|
||||
Fields marked ❓ are present in the captured payload but not yet decoded.
|
||||
See docs/instantel_protocol_reference.md for full derivation details.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import struct
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# ── Timestamp ─────────────────────────────────────────────────────────────────
|
||||
|
||||
@dataclass
class Timestamp:
    """
    6-byte event timestamp decoded from the MiniMate Plus wire format.

    Wire layout: [flag:1] [year:2 BE] [unknown:1] [month:1] [day:1]

    The year 1995 is the device's factory-default RTC date — it appears
    whenever the battery has been disconnected. Treat 1995 as "clock not set".
    """
    raw: bytes         # raw 6-byte sequence for round-tripping
    flag: int          # byte 0 — validity/type flag (usually 0x01) 🔶
    year: int          # bytes 1–2 big-endian uint16 ✅
    unknown_byte: int  # byte 3 — likely hours/minutes ❓
    month: int         # byte 4 ✅
    day: int           # byte 5 ✅

    @classmethod
    def from_bytes(cls, data: bytes) -> "Timestamp":
        """
        Decode a 6-byte timestamp sequence.

        Args:
            data: exactly 6 bytes from the device payload.

        Returns:
            Decoded Timestamp.

        Raises:
            ValueError: if data is not exactly 6 bytes.
        """
        if len(data) != 6:
            raise ValueError(f"Timestamp requires exactly 6 bytes, got {len(data)}")
        # Tuple-unpack the six bytes; the big-endian year is reassembled
        # from its two halves.
        flag, year_hi, year_lo, unknown, month, day = data
        return cls(
            raw=bytes(data),
            flag=flag,
            year=(year_hi << 8) | year_lo,
            unknown_byte=unknown,
            month=month,
            day=day,
        )

    @property
    def clock_set(self) -> bool:
        """False when year == 1995 (factory default / battery-lost state)."""
        return self.year != 1995

    def __str__(self) -> str:
        date = f"{self.year}-{self.month:02d}-{self.day:02d}"
        return date if self.clock_set else f"CLOCK_NOT_SET ({date})"
|
||||
|
||||
|
||||
# ── Device identity ───────────────────────────────────────────────────────────
|
||||
|
||||
@dataclass
class DeviceInfo:
    """
    Combined device identity information gathered during the startup sequence.

    Populated from three response SUBs:
        - SUB EA (SERIAL_NUMBER_RESPONSE): serial, firmware_minor
        - SUB FE (FULL_CONFIG_RESPONSE):   serial (repeat), firmware_version,
                                           dsp_version, manufacturer, model
        - SUB A4 (POLL_RESPONSE):          manufacturer (repeat), model (repeat)

    All string fields are stripped of null padding before storage.
    """

    # From SUB EA (SERIAL_NUMBER_RESPONSE)
    serial: str                            # e.g. "BE18189" ✅
    firmware_minor: int                    # 0x11 = 17 for S337.17 ✅
    serial_trail_0: Optional[int] = None   # unit-specific byte — purpose unknown ❓

    # From SUB FE (FULL_CONFIG_RESPONSE)
    firmware_version: Optional[str] = None  # e.g. "S337.17" ✅
    dsp_version: Optional[str] = None       # e.g. "10.72" ✅
    manufacturer: Optional[str] = None      # e.g. "Instantel" ✅
    model: Optional[str] = None             # e.g. "MiniMate Plus" ✅

    def __str__(self) -> str:
        # Fall back to the minor version / default model when the full-config
        # read has not run (or did not find the strings).
        firmware = self.firmware_version if self.firmware_version else f"?.{self.firmware_minor}"
        label = self.model if self.model else "MiniMate Plus"
        return f"{label} S/N:{self.serial} FW:{firmware}"
|
||||
|
||||
|
||||
# ── Channel threshold / scaling ───────────────────────────────────────────────
|
||||
|
||||
@dataclass
class ChannelConfig:
    """
    Per-channel threshold and scaling values (SUB E5 / SUB 71).

    The device stores floats in imperial units — in/s for geophone channels
    and psi for MicL; unit strings embedded in the payload confirm this.
    trigger_level, alarm_level and the unit strings are confirmed against
    captures; max_range is still tentative.
    """

    label: str            # channel name, e.g. "Tran", "Vert", "Long", "MicL"
    trigger_level: float  # trigger threshold — in/s (geo) or psi (MicL)
    alarm_level: float    # alarm threshold — in/s (geo) or psi (MicL)
    max_range: float      # full-scale calibration constant (e.g. 6.206) — tentative
    unit_label: str       # unit string from payload, e.g. "in./s" or "psi"
|
||||
|
||||
|
||||
# ── Peak values for one event ─────────────────────────────────────────────────
|
||||
|
||||
@dataclass
class PeakValues:
    """
    Per-channel peak particle velocity / pressure for a single event.

    Decoded from the Full Waveform Record (SUB F3), where the values are
    IEEE 754 big-endian floats in the device's native units (in/s / psi).
    """

    tran: Optional[float] = None  # transverse PPV, in/s
    vert: Optional[float] = None  # vertical PPV, in/s
    long: Optional[float] = None  # longitudinal PPV, in/s
    micl: Optional[float] = None  # air overpressure, psi — units not yet confirmed
|
||||
|
||||
|
||||
# ── Project / operator metadata ───────────────────────────────────────────────
|
||||
|
||||
@dataclass
class ProjectInfo:
    """
    Operator-supplied project and location strings.

    Extracted from the Full Waveform Record (SUB F3) and the compliance
    config block (SUB E5 / SUB 71). Every field is optional — any of them
    may be blank if the operator never filled it in through Blastware.
    """

    setup_name: Optional[str] = None       # e.g. "Standard Recording Setup"
    project: Optional[str] = None          # project description
    client: Optional[str] = None           # client name (confirmed offset)
    operator: Optional[str] = None         # operator / user name
    sensor_location: Optional[str] = None  # sensor location string
    notes: Optional[str] = None            # extended notes
|
||||
|
||||
|
||||
# ── Event ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@dataclass
class Event:
    """
    One seismic event record downloaded from the device.

    Filled in progressively across several request/response pairs:
      1. SUB 1E (EVENT_HEADER)         → index, timestamp, sample_rate
      2. SUB 0C (FULL_WAVEFORM_RECORD) → peak_values, project_info, record_type
      3. SUB 5A (BULK_WAVEFORM_STREAM) → raw_samples (downloaded on demand)

    Any field not yet retrieved is None.
    """

    # Identity
    index: int  # 0-based event number on the device

    # From EVENT_HEADER (SUB 1E)
    timestamp: Optional[Timestamp] = None  # 6-byte device timestamp
    sample_rate: Optional[int] = None      # samples/sec (e.g. 1024) — tentative

    # From FULL_WAVEFORM_RECORD (SUB F3)
    peak_values: Optional[PeakValues] = None
    project_info: Optional[ProjectInfo] = None
    record_type: Optional[str] = None      # e.g. "Histogram", "Waveform" — tentative

    # From BULK_WAVEFORM_STREAM (SUB 5A). Raw ADC samples keyed by channel
    # label; fetched only on explicit request (up to several MB per event).
    raw_samples: Optional[dict] = None     # {"Tran": [...], "Vert": [...], ...}

    def __str__(self) -> str:
        """Compact one-line summary: index, timestamp, and any known peaks."""
        stamp = str(self.timestamp) if self.timestamp else "no timestamp"
        peaks = ""
        pv = self.peak_values
        if pv:
            # MicL gets two extra decimals; it is typically orders of
            # magnitude smaller than the geophone peaks.
            channels = [
                ("T", pv.tran, ".4f"),
                ("V", pv.vert, ".4f"),
                ("L", pv.long, ".4f"),
                ("M", pv.micl, ".6f"),
            ]
            rendered = [f"{tag}={val:{fmt}}" for tag, val, fmt in channels if val is not None]
            peaks = " [" + ", ".join(rendered) + " in/s]"
        return f"Event#{self.index} {stamp}{peaks}"
|
||||
310
minimateplus/protocol.py
Normal file
310
minimateplus/protocol.py
Normal file
@@ -0,0 +1,310 @@
|
||||
"""
|
||||
protocol.py — High-level MiniMate Plus request/response protocol.
|
||||
|
||||
Implements the request/response patterns documented in
|
||||
docs/instantel_protocol_reference.md on top of:
|
||||
- minimateplus.framing — DLE codec, frame builder, S3 streaming parser
|
||||
- minimateplus.transport — byte I/O (SerialTransport / future TcpTransport)
|
||||
|
||||
This module knows nothing about pyserial or TCP — it only calls
|
||||
transport.write() and transport.read_until_idle().
|
||||
|
||||
Key patterns implemented:
|
||||
- POLL startup handshake (two-step, special payload[5] format)
|
||||
- Generic two-step paged read (probe → get length → fetch data)
|
||||
- Response timeout + checksum validation
|
||||
- Boot-string drain (device sends "Operating System" ASCII before framing)
|
||||
|
||||
All public methods raise ProtocolError on timeout, bad checksum, or
|
||||
unexpected response SUB.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
from .framing import (
|
||||
S3Frame,
|
||||
S3FrameParser,
|
||||
build_bw_frame,
|
||||
POLL_PROBE,
|
||||
POLL_DATA,
|
||||
)
|
||||
from .transport import BaseTransport
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ── Constants ─────────────────────────────────────────────────────────────────
|
||||
|
||||
# Response SUB = 0xFF - Request SUB (confirmed pattern, no known exceptions
|
||||
# among read commands; one write-path exception documented for SUB 1C→6E).
|
||||
def _expected_rsp_sub(req_sub: int) -> int:
|
||||
return (0xFF - req_sub) & 0xFF
|
||||
|
||||
|
||||
# SUB byte constants (request side) — see protocol reference §5.1.
# The matching response SUB is always 0xFF minus the request SUB
# (see _expected_rsp_sub above).
SUB_POLL = 0x5B
SUB_SERIAL_NUMBER = 0x15
SUB_FULL_CONFIG = 0x01
SUB_EVENT_INDEX = 0x08
SUB_CHANNEL_CONFIG = 0x06
SUB_TRIGGER_CONFIG = 0x1C
SUB_EVENT_HEADER = 0x1E
SUB_WAVEFORM_HEADER = 0x0A
SUB_WAVEFORM_RECORD = 0x0C
SUB_BULK_WAVEFORM = 0x5A
SUB_COMPLIANCE = 0x1A
SUB_UNKNOWN_2E = 0x2E

# Hardcoded data lengths for the two-step read protocol.
#
# The S3 probe response page_key is always 0x0000 — it does NOT carry the
# data length back to us. Instead, each SUB has a fixed known payload size
# confirmed from BW capture analysis (the value is sent as payload[5] of
# the data-request frame).
#
# Key: request SUB byte. Value: offset/length byte sent in the data-request.
# Entries marked 🔶 are inferred from captured frames and may need adjustment.
DATA_LENGTHS: dict[int, int] = {
    SUB_POLL: 0x30,            # POLL startup data block ✅
    SUB_SERIAL_NUMBER: 0x0A,   # 10-byte serial number block ✅
    SUB_FULL_CONFIG: 0x98,     # 152-byte full config block ✅
    SUB_EVENT_INDEX: 0x58,     # 88-byte event index ✅
    SUB_TRIGGER_CONFIG: 0x2C,  # 44-byte trigger config 🔶
    SUB_UNKNOWN_2E: 0x1A,      # 26 bytes, purpose TBD 🔶
    0x09: 0xCA,                # 202 bytes, purpose TBD 🔶
    # SUB_COMPLIANCE (0x1A) uses a multi-step sequence with a 2090-byte total;
    # NOT handled here — requires specialised read logic.
}

# Default timeout values (seconds). POLL uses a shorter timeout because the
# handshake response is expected immediately after the probe.
DEFAULT_RECV_TIMEOUT = 3.0
POLL_RECV_TIMEOUT = 2.0
|
||||
|
||||
|
||||
# ── Exception ─────────────────────────────────────────────────────────────────
|
||||
|
||||
class ProtocolError(Exception):
    """Base error: the device violated the expected protocol."""


class TimeoutError(ProtocolError):
    """No response was received within the allowed time.

    NOTE(review): shadows the builtin ``TimeoutError`` inside this module;
    renaming would break callers, so it is kept as-is.
    """


class ChecksumError(ProtocolError):
    """A received frame carried an invalid checksum."""


class UnexpectedResponse(ProtocolError):
    """The response SUB byte did not match what was requested."""
|
||||
|
||||
|
||||
# ── MiniMateProtocol ──────────────────────────────────────────────────────────
|
||||
|
||||
class MiniMateProtocol:
    """
    Protocol state machine for one open connection to a MiniMate Plus device.

    Does not own the transport — transport lifetime is managed by MiniMateClient.

    Typical usage (via MiniMateClient — not directly):
        proto = MiniMateProtocol(transport)
        proto.startup()                     # POLL handshake, drain boot string
        data = proto.read(SUB_FULL_CONFIG)
        sn_data = proto.read(SUB_SERIAL_NUMBER)
    """

    def __init__(
        self,
        transport: BaseTransport,
        recv_timeout: float = DEFAULT_RECV_TIMEOUT,
    ) -> None:
        # The transport is borrowed, never opened/closed here.
        self._transport = transport
        self._recv_timeout = recv_timeout
        # Streaming frame parser; reset at the start of every receive so a
        # stale partial frame from a previous exchange cannot leak in.
        self._parser = S3FrameParser()

    # ── Public API ────────────────────────────────────────────────────────────

    def startup(self) -> S3Frame:
        """
        Perform the POLL startup handshake and return the POLL data frame.

        Steps (matching §6 Session Startup Sequence):
            1. Drain any boot-string bytes ("Operating System" ASCII)
            2. Send POLL_PROBE (SUB 5B, offset=0x00)
            3. Receive probe ack (page_key is 0x0000; data length 0x30 is hardcoded)
            4. Send POLL_DATA (SUB 5B, offset=0x30)
            5. Receive data frame with "Instantel" + "MiniMate Plus" strings

        Returns:
            The data-phase POLL response S3Frame.

        Raises:
            ProtocolError: if either POLL step fails.
        """
        log.debug("startup: draining boot string")
        self._drain_boot_string()

        log.debug("startup: POLL probe")
        self._send(POLL_PROBE)
        probe_rsp = self._recv_one(
            expected_sub=_expected_rsp_sub(SUB_POLL),
            timeout=POLL_RECV_TIMEOUT,
        )
        log.debug(
            "startup: POLL probe response page_key=0x%04X", probe_rsp.page_key
        )

        log.debug("startup: POLL data request")
        self._send(POLL_DATA)
        data_rsp = self._recv_one(
            expected_sub=_expected_rsp_sub(SUB_POLL),
            timeout=POLL_RECV_TIMEOUT,
        )
        log.debug("startup: POLL data received, %d bytes", len(data_rsp.data))
        return data_rsp

    def read(self, sub: int) -> bytes:
        """
        Execute a two-step paged read and return the data payload bytes.

        Step 1: send probe frame (offset=0x00) → device sends a short ack
        Step 2: send data-request (offset=DATA_LEN) → device sends the data block

        The S3 probe response does NOT carry the data length — page_key is always
        0x0000 in observed frames. DATA_LENGTHS holds the known fixed lengths
        derived from BW capture analysis.

        Args:
            sub: Request SUB byte (e.g. SUB_FULL_CONFIG = 0x01).

        Returns:
            De-stuffed data payload bytes (payload[5:] of the response frame,
            with the checksum already stripped by the parser).

        Raises:
            ProtocolError: on timeout, bad checksum, wrong response SUB, or if
                sub is not in DATA_LENGTHS (in which case add it there).
        """
        rsp_sub = _expected_rsp_sub(sub)

        # Step 1 — probe (offset = 0)
        log.debug("read SUB=0x%02X: probe", sub)
        self._send(build_bw_frame(sub, 0))
        _probe = self._recv_one(expected_sub=rsp_sub)  # ack; page_key always 0

        # Look up the hardcoded data length for this SUB. Checked *after* the
        # probe so the exchange mirrors the capture even for unknown SUBs.
        if sub not in DATA_LENGTHS:
            raise ProtocolError(
                f"No known data length for SUB=0x{sub:02X}. "
                "Add it to DATA_LENGTHS in protocol.py."
            )
        length = DATA_LENGTHS[sub]
        log.debug("read SUB=0x%02X: data request offset=0x%02X", sub, length)

        # Defensive: a zero length would make the data-request a no-op.
        if length == 0:
            log.warning("read SUB=0x%02X: DATA_LENGTHS entry is zero", sub)
            return b""

        # Step 2 — data-request (offset = length)
        self._send(build_bw_frame(sub, length))
        data_rsp = self._recv_one(expected_sub=rsp_sub)

        log.debug("read SUB=0x%02X: received %d data bytes", sub, len(data_rsp.data))
        return data_rsp.data

    def send_keepalive(self) -> None:
        """
        Send a single POLL_PROBE keepalive without waiting for a response.

        Blastware sends these every ~80ms during idle. Useful if you need to
        hold the session open between real requests.
        """
        self._send(POLL_PROBE)

    # ── Internal helpers ──────────────────────────────────────────────────────

    def _send(self, frame: bytes) -> None:
        """Write a pre-built frame to the transport, logging the raw hex."""
        log.debug("TX %d bytes: %s", len(frame), frame.hex())
        self._transport.write(frame)

    def _recv_one(
        self,
        expected_sub: Optional[int] = None,
        timeout: Optional[float] = None,
    ) -> S3Frame:
        """
        Read bytes from the transport until one complete S3 frame is parsed.

        Feeds bytes through the streaming S3FrameParser. Keeps reading until
        a frame arrives or the deadline expires. If several frames arrive in
        one burst, only the first is returned; the rest are discarded with
        the parser reset on the next call.

        Args:
            expected_sub: If provided, raises UnexpectedResponse if the
                received frame's SUB doesn't match.
            timeout: Seconds to wait. Defaults to self._recv_timeout.

        Returns:
            The first complete S3Frame received.

        Raises:
            TimeoutError: if no frame arrives within the timeout.
            ChecksumError: if the frame has an invalid checksum.
            UnexpectedResponse: if expected_sub is set and doesn't match.
        """
        deadline = time.monotonic() + (timeout or self._recv_timeout)
        self._parser.reset()

        while time.monotonic() < deadline:
            chunk = self._transport.read(256)
            if chunk:
                log.debug("RX %d bytes: %s", len(chunk), chunk.hex())
                frames = self._parser.feed(chunk)
                if frames:
                    frame = frames[0]
                    self._validate_frame(frame, expected_sub)
                    return frame
            else:
                # Nothing available — yield briefly instead of busy-spinning.
                time.sleep(0.005)

        raise TimeoutError(
            f"No S3 frame received within {timeout or self._recv_timeout:.1f}s"
            + (f" (expected SUB 0x{expected_sub:02X})" if expected_sub is not None else "")
        )

    @staticmethod
    def _validate_frame(frame: S3Frame, expected_sub: Optional[int]) -> None:
        """Validate checksum and SUB, raising ChecksumError/UnexpectedResponse."""
        if not frame.checksum_valid:
            raise ChecksumError(
                f"Bad checksum in S3 frame SUB=0x{frame.sub:02X}"
            )
        if expected_sub is not None and frame.sub != expected_sub:
            raise UnexpectedResponse(
                f"Expected SUB=0x{expected_sub:02X}, got 0x{frame.sub:02X}"
            )

    def _drain_boot_string(self, drain_ms: int = 200) -> None:
        """
        Read and discard any boot-string bytes ("Operating System") the device
        may send before entering binary protocol mode.

        We simply read with a short timeout and throw the bytes away. The
        S3FrameParser's IDLE state already handles non-frame bytes gracefully,
        but it's cleaner to drain them explicitly before the first real frame.

        Args:
            drain_ms: How long to keep draining, in milliseconds.
        """
        deadline = time.monotonic() + (drain_ms / 1000)
        discarded = 0
        while time.monotonic() < deadline:
            chunk = self._transport.read(256)
            if chunk:
                discarded += len(chunk)
            else:
                time.sleep(0.005)
        if discarded:
            log.debug("drain_boot_string: discarded %d bytes", discarded)
|
||||
258
minimateplus/transport.py
Normal file
258
minimateplus/transport.py
Normal file
@@ -0,0 +1,258 @@
|
||||
"""
|
||||
transport.py — Serial (and future TCP) transport layer for the MiniMate Plus protocol.
|
||||
|
||||
Provides a thin I/O abstraction so that protocol.py never imports pyserial directly.
|
||||
The only concrete implementation here is SerialTransport; a TcpTransport can be
|
||||
added later without touching any other layer.
|
||||
|
||||
Typical usage:
|
||||
from minimateplus.transport import SerialTransport
|
||||
|
||||
t = SerialTransport("COM5")
|
||||
t.connect()
|
||||
t.write(frame_bytes)
|
||||
data = t.read_until_idle(timeout=2.0)
|
||||
t.disconnect()
|
||||
|
||||
# or as a context manager:
|
||||
with SerialTransport("COM5") as t:
|
||||
t.write(frame_bytes)
|
||||
data = t.read_until_idle()
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Optional
|
||||
|
||||
# pyserial is the only non-stdlib dependency in this project.
|
||||
# Import lazily so unit-tests that mock the transport can run without it.
|
||||
try:
|
||||
import serial # type: ignore
|
||||
except ImportError: # pragma: no cover
|
||||
serial = None # type: ignore
|
||||
|
||||
|
||||
# ── Abstract base ─────────────────────────────────────────────────────────────
|
||||
|
||||
class BaseTransport(ABC):
    """Common interface for all transport implementations.

    Subclasses provide raw byte I/O (connect/disconnect/write/read); this
    base class adds context-manager support and two timing-based read
    helpers built on top of the non-blocking read() contract.
    """

    @abstractmethod
    def connect(self) -> None:
        """Open the underlying connection."""

    @abstractmethod
    def disconnect(self) -> None:
        """Close the underlying connection."""

    @property
    @abstractmethod
    def is_connected(self) -> bool:
        """True while the connection is open."""

    @abstractmethod
    def write(self, data: bytes) -> None:
        """Write *data* bytes to the wire."""

    @abstractmethod
    def read(self, n: int) -> bytes:
        """
        Read up to *n* bytes. Returns immediately with whatever is available
        (may return fewer than *n* bytes, or b"" if nothing is ready).
        """

    # ── Context manager ───────────────────────────────────────────────────────

    def __enter__(self) -> "BaseTransport":
        self.connect()
        return self

    def __exit__(self, *_) -> None:
        # Exceptions are never suppressed (implicit None return).
        self.disconnect()

    # ── Higher-level read helpers ─────────────────────────────────────────────

    def read_until_idle(
        self,
        timeout: float = 2.0,
        idle_gap: float = 0.05,
        chunk: int = 256,
    ) -> bytes:
        """
        Read bytes until the line goes quiet.

        Keeps reading in *chunk*-sized bursts. Returns when either:
          - *timeout* seconds have elapsed since the call started, or
          - *idle_gap* seconds pass with no new bytes after at least one byte
            has arrived (line went quiet).

        This mirrors how Blastware behaves: it waits for the seismograph to
        stop transmitting rather than counting bytes.

        Args:
            timeout: Hard deadline (seconds) from the moment read starts.
            idle_gap: How long to wait after the last byte before declaring done.
            chunk: How many bytes to request per low-level read() call.

        Returns:
            All bytes received as a single bytes object (may be b"" if nothing
            arrived within *timeout*).
        """
        buf = bytearray()
        deadline = time.monotonic() + timeout
        last_rx: Optional[float] = None  # time of most recent byte, None until first byte

        while time.monotonic() < deadline:
            got = self.read(chunk)
            if got:
                buf.extend(got)
                last_rx = time.monotonic()
            else:
                # Nothing ready — check idle gap (only once data has arrived)
                if last_rx is not None and (time.monotonic() - last_rx) >= idle_gap:
                    break
                time.sleep(0.005)

        return bytes(buf)

    def read_exact(self, n: int, timeout: float = 2.0) -> bytes:
        """
        Read exactly *n* bytes or raise TimeoutError (the builtin).

        Useful when the caller already knows the expected response length
        (e.g. fixed-size ACK packets).

        Args:
            n: Number of bytes required.
            timeout: Hard deadline in seconds.

        Raises:
            TimeoutError: if fewer than *n* bytes arrive before the deadline.
        """
        buf = bytearray()
        deadline = time.monotonic() + timeout
        while len(buf) < n:
            if time.monotonic() >= deadline:
                raise TimeoutError(
                    f"read_exact: wanted {n} bytes, got {len(buf)} "
                    f"after {timeout:.1f}s"
                )
            got = self.read(n - len(buf))
            if got:
                buf.extend(got)
            else:
                time.sleep(0.005)
        return bytes(buf)
|
||||
|
||||
|
||||
# ── Serial transport ──────────────────────────────────────────────────────────
|
||||
|
||||
# Default baud rate confirmed from Blastware / MiniMate Plus documentation.
DEFAULT_BAUD = 38_400

# pyserial serial port config matching the MiniMate Plus RS-232 spec:
# 8 data bits, no parity, 1 stop bit (8N1). Plain literals are used instead
# of the serial.* constants so this module stays importable without pyserial.
_SERIAL_BYTESIZE = 8    # serial.EIGHTBITS
_SERIAL_PARITY = "N"    # serial.PARITY_NONE
_SERIAL_STOPBITS = 1    # serial.STOPBITS_ONE
|
||||
|
||||
|
||||
class SerialTransport(BaseTransport):
    """
    pyserial-backed transport for a direct RS-232 cable connection.

    The port is opened with a very short read timeout (10 ms) so that
    read() returns quickly and the caller can implement its own framing /
    timeout logic without blocking the whole process.

    Args:
        port: COM port name (e.g. "COM5" on Windows, "/dev/ttyUSB0" on Linux).
        baud: Baud rate (default 38400).
        rts_cts: Enable RTS/CTS hardware flow control (default False — MiniMate
            typically uses no flow control).
    """

    # Internal read timeout (seconds); keeps read() effectively non-blocking.
    _READ_TIMEOUT = 0.01

    def __init__(
        self,
        port: str,
        baud: int = DEFAULT_BAUD,
        rts_cts: bool = False,
    ) -> None:
        # Fail fast if pyserial was never installed (module import is lazy).
        if serial is None:
            raise ImportError(
                "pyserial is required for SerialTransport. "
                "Install it with: pip install pyserial"
            )
        self.port = port
        self.baud = baud
        self.rts_cts = rts_cts
        self._ser: Optional[serial.Serial] = None

    # ── BaseTransport interface ───────────────────────────────────────────────

    def connect(self) -> None:
        """Open the serial port (idempotent). Raises serial.SerialException on failure."""
        if self._ser is not None and self._ser.is_open:
            return
        self._ser = serial.Serial(
            port=self.port,
            baudrate=self.baud,
            bytesize=_SERIAL_BYTESIZE,
            parity=_SERIAL_PARITY,
            stopbits=_SERIAL_STOPBITS,
            timeout=self._READ_TIMEOUT,
            rtscts=self.rts_cts,
            xonxoff=False,
            dsrdtr=False,
        )
        # Drop any stale bytes left in device / OS buffers by a previous session.
        self._ser.reset_input_buffer()
        self._ser.reset_output_buffer()

    def disconnect(self) -> None:
        """Close the serial port. Safe to call even if already closed."""
        if self._ser is None:
            return
        try:
            self._ser.close()
        except Exception:
            # Best-effort close; a failed close leaves nothing to recover.
            pass
        self._ser = None

    @property
    def is_connected(self) -> bool:
        """True while the port object exists and reports itself open."""
        return bool(self._ser and self._ser.is_open)

    def write(self, data: bytes) -> None:
        """
        Write *data* to the serial port and flush it to the wire.

        Raises:
            RuntimeError: if not connected.
            serial.SerialException: on I/O error.
        """
        if not self.is_connected:
            raise RuntimeError("SerialTransport.write: not connected")
        self._ser.write(data)  # type: ignore[union-attr]
        self._ser.flush()  # type: ignore[union-attr]

    def read(self, n: int) -> bytes:
        """
        Read up to *n* bytes from the serial port.

        Returns b"" immediately if no data is available (non-blocking in
        practice thanks to the 10 ms read timeout).

        Raises:
            RuntimeError: if not connected.
        """
        if not self.is_connected:
            raise RuntimeError("SerialTransport.read: not connected")
        return self._ser.read(n)  # type: ignore[union-attr]

    # ── Extras ────────────────────────────────────────────────────────────────

    def flush_input(self) -> None:
        """Discard any unread bytes in the OS receive buffer."""
        if self.is_connected:
            self._ser.reset_input_buffer()  # type: ignore[union-attr]

    def __repr__(self) -> str:
        state = "open" if self.is_connected else "closed"
        return f"SerialTransport({self.port!r}, baud={self.baud}, {state})"
|
||||
Reference in New Issue
Block a user