From 08c54f992c2e3f44325e7bb854514aadadf51c8b Mon Sep 17 00:00:00 2001 From: serversdwn Date: Tue, 18 Nov 2025 02:57:14 -0500 Subject: [PATCH 01/11] Update ROSTER_URL in config.ini example config --- config.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.ini b/config.ini index 5b4c3dc..e048367 100644 --- a/config.ini +++ b/config.ini @@ -2,7 +2,7 @@ # Paths SERIES3_PATH = C:\Blastware 10\Event\autocall home ROSTER_FILE = C:\SeismoEmitter\series3_roster.csv -ROSTER_URL = https://www.dropbox.com/scl/fi/gadrpjj2nif3f6q5k60zy/series3_roster.csv?rlkey=fkycemzg4s86pmlxpih4f3pkx&st=hvx0mgln&dl=1 +ROSTER_URL = https://www.dropbox.com/URL ROSTER_REFRESH_MIN_SECONDS = 0 From a03d4a1f05c661e008d0614eb752e7f29b68671c Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 20 Nov 2025 18:24:57 -0500 Subject: [PATCH 02/11] Add API_URL support + POST reporting logic --- series3_emitter.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/series3_emitter.py b/series3_emitter.py index deb6b6c..9ab04dc 100644 --- a/series3_emitter.py +++ b/series3_emitter.py @@ -28,6 +28,7 @@ import csv import time import configparser import urllib.request +import requests from datetime import datetime, timezone, timedelta from typing import Dict, Any, Optional, Tuple, Set, List @@ -273,6 +274,21 @@ def refresh_roster_from_url(url: str, dest: str, min_seconds: int, def cfg_get(cfg: dict, key: str, default=None): return cfg.get(key, cfg.get(key.lower(), cfg.get(key.upper(), default))) +#---Report to server --- +def report_to_server(server_url: str, uid: str, info: dict, status: str): + payload = { + "unit": uid, + "unit_type": "series3", + "timestamp": fmt_last(info["mtime"]), + "file": info["fname"], + "status": status + } + try: + requests.post(server_url, json=payload, timeout=5) + except Exception as e: + print(f"[WARN] report_to_server failed for {uid}: {e}") + + # --------------- Main loop ------------------ def main() -> None: here = 
os.path.dirname(__file__) or "." @@ -408,6 +424,8 @@ def main() -> None: line = "{col}{uid:<8} Missing Age: N/A Last: ---{note}{rst}".format(col=C_MIS, uid=uid, note=note_suffix, rst=C_RST) print(line) log_message(LOG_FILE, ENABLE_LOGGING, line) + if info is not None: + report_to_server(cfg["API_URL"], uid, info, status) # Bench Units (rostered but not active in field) print("\nBench Units (rostered, not active):") From 551fdae106b86da005826c746b5dab9cfa824ef3 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Mon, 1 Dec 2025 16:30:08 -0500 Subject: [PATCH 03/11] v1.1 w/ api funtion added --- series3_emitter.py | 349 ++++++++++++++++++++++++++++++--------------- 1 file changed, 234 insertions(+), 115 deletions(-) diff --git a/series3_emitter.py b/series3_emitter.py index 9ab04dc..50b06c3 100644 --- a/series3_emitter.py +++ b/series3_emitter.py @@ -1,5 +1,5 @@ """ -Series 3 Emitter β€” v1.0.0 (Stable Baseline, SemVer Reset) +Series 3 Emitter β€” v1.1.0 Environment: - Python 3.8 (Windows 7 compatible) @@ -14,33 +14,32 @@ Key Features: - Compact console heartbeat with status per unit - Logging with retention auto-clean (days configurable) - Safe .MLG header sniff for unit IDs (BE#### / BA####) - -Changelog: -- Reset to semantic versioning (from legacy v5.9 beta) -- Fixed stray `note=note_suffix` bug in Unexpected Units block -- Removed duplicate imports and redundant roster load at startup -- Added startup config echo (paths + URL status) +- NEW in v1.1.0: + - Standardized SFM Telemetry JSON payload (source-agnostic) + - Periodic HTTP heartbeat POST to SFM backend """ import os import re import csv import time +import json import configparser import urllib.request -import requests +import urllib.error from datetime import datetime, timezone, timedelta from typing import Dict, Any, Optional, Tuple, Set, List +from socket import gethostname # ---------------- Config ---------------- def load_config(path: str) -> Dict[str, Any]: """Load INI with tolerant inline comments and 
a required [emitter] section.""" - cp = configparser.ConfigParser(inline_comment_prefixes=(';', '#')) + cp = configparser.ConfigParser(inline_comment_prefixes=(";", "#")) cp.optionxform = str # preserve key case with open(path, "r", encoding="utf-8") as f: txt = f.read() # Ensure we have a section header - if not re.search(r'^\s*\[', txt, flags=re.M): + if not re.search(r"^\s*\[", txt, flags=re.M): txt = "[emitter]\n" + txt cp.read_string(txt) sec = cp["emitter"] @@ -58,7 +57,7 @@ def load_config(path: str) -> Dict[str, Any]: v = sec.get(k, None) if v is None: return dflt - return v.strip().lower() in ("1","true","on","yes","y") + return v.strip().lower() in ("1", "true", "on", "yes", "y") return { "WATCH_PATH": get_str("SERIES3_PATH", r"C:\Blastware 10\Event\autocall home"), @@ -74,12 +73,21 @@ def load_config(path: str) -> Dict[str, Any]: "COLORIZE": get_bool("COLORIZE", False), # Win7 default off "MLG_HEADER_BYTES": max(256, min(get_int("MLG_HEADER_BYTES", 2048), 65536)), "RECENT_WARN_DAYS": get_int("RECENT_WARN_DAYS", 30), + + # API heartbeat / SFM telemetry + "API_ENABLED": get_bool("API_ENABLED", False), + "API_URL": get_str("API_URL", ""), + "API_INTERVAL_SECONDS": get_int("API_INTERVAL_SECONDS", 300), + "SOURCE_ID": get_str("SOURCE_ID", gethostname()), + "SOURCE_TYPE": get_str("SOURCE_TYPE", "series3_emitter"), } + # --------------- ANSI helpers --------------- def ansi(enabled: bool, code: str) -> str: return code if enabled else "" + # --------------- Logging -------------------- def log_message(path: str, enabled: bool, msg: str) -> None: if not enabled: @@ -93,6 +101,7 @@ def log_message(path: str, enabled: bool, msg: str) -> None: except Exception: pass + def clear_logs_if_needed(log_file: str, enabled: bool, retention_days: int) -> None: if not enabled or retention_days <= 0: return @@ -116,15 +125,18 @@ def clear_logs_if_needed(log_file: str, enabled: bool, retention_days: int) -> N except Exception: pass + # --------------- Roster 
--------------------- def normalize_id(uid: str) -> str: if uid is None: return "" return uid.replace(" ", "").strip().upper() + def load_roster(path: str) -> Tuple[Set[str], Set[str], Set[str], Dict[str, str]]: - """CSV tolerant of commas in notes: device_id, active, notes... - Returns: active, bench, ignored, notes_by_unit + """ + CSV tolerant of commas in notes: device_id, active, notes... + Returns: active, bench, ignored, notes_by_unit """ active: Set[str] = set() bench: Set[str] = set() @@ -133,15 +145,18 @@ def load_roster(path: str) -> Tuple[Set[str], Set[str], Set[str], Dict[str, str] if not os.path.exists(path): print("[WARN] Roster not found:", path) - return active, notes_by_unit + return active, bench, ignored, notes_by_unit + try: with open(path, "r", encoding="utf-8-sig", newline="") as f: rdr = csv.reader(f) try: headers = next(rdr) except StopIteration: - return active, notes_by_unit + return active, bench, ignored, notes_by_unit + headers = [(h or "").strip().lower() for h in headers] + def idx_of(name: str, fallbacks: List[str]) -> Optional[int]: if name in headers: return headers.index(name) @@ -149,12 +164,15 @@ def load_roster(path: str) -> Tuple[Set[str], Set[str], Set[str], Dict[str, str] if fb in headers: return headers.index(fb) return None - i_id = idx_of("device_id", ["unitid","id"]) + + i_id = idx_of("device_id", ["unitid", "id"]) i_ac = idx_of("active", []) - i_no = idx_of("notes", ["note","location"]) + i_no = idx_of("notes", ["note", "location"]) + if i_id is None or i_ac is None: print("[WARN] Roster missing device_id/active columns") - return active, notes_by_unit + return active, bench, ignored, notes_by_unit + for row in rdr: if len(row) <= max(i_id, i_ac): continue @@ -163,25 +181,28 @@ def load_roster(path: str) -> Tuple[Set[str], Set[str], Set[str], Dict[str, str] if i_no is not None: extra = row[i_no:] note = ",".join([c or "" for c in extra]).strip().rstrip(",") - notes_by_unit[uid] = note if not uid: continue - is_active = 
(row[i_ac] or "").strip().lower() in ("yes","y","true","1","on") + notes_by_unit[uid] = note + flag = (row[i_ac] or "").strip().lower() - if flag in ("yes","y","true","1","on"): + if flag in ("yes", "y", "true", "1", "on"): active.add(uid) - elif flag in ("no","n","off","0"): + elif flag in ("no", "n", "off", "0"): bench.add(uid) - elif flag in ("ignore","retired","old"): + elif flag in ("ignore", "retired", "old"): ignored.add(uid) except Exception as e: print("[WARN] Roster read error:", e) + return active, bench, ignored, notes_by_unit + # --------------- .MLG sniff ------------------ UNIT_BYTES_RE = re.compile(rb"(?:^|[^A-Z])(BE|BA)\d{4,5}(?:[^0-9]|$)") + def sniff_unit_from_mlg(path: str, header_bytes: int) -> Optional[str]: """Return BE####/BA#### from header bytes, or None.""" try: @@ -199,21 +220,30 @@ def sniff_unit_from_mlg(path: str, header_bytes: int) -> Optional[str]: except Exception: return None + # --------------- Scan helpers --------------- def fmt_last(ts: float) -> str: return datetime.fromtimestamp(ts, tz=timezone.utc).astimezone().strftime("%Y-%m-%d %H:%M:%S") + def fmt_age(now_epoch: float, mtime: float) -> str: mins = int((now_epoch - mtime) // 60) - if mins < 0: mins = 0 - return "{}h {}m".format(mins//60, mins%60) + if mins < 0: + mins = 0 + return "{}h {}m".format(mins // 60, mins % 60) -def scan_latest(watch: str, header_bytes: int, - cache: Dict[str, Tuple[float, str]], - recent_cutoff: float = None, - logger=None): - """Return newest .MLG per unit: {uid: {'mtime': float, 'fname': str}}""" +def scan_latest( + watch: str, + header_bytes: int, + cache: Dict[str, Tuple[float, str]], + recent_cutoff: float = None, + logger=None, +) -> Dict[str, Dict[str, Any]]: + """ + Return newest .MLG per unit: + {uid: {'mtime': float, 'fname': str, 'path': str}} + """ latest: Dict[str, Dict[str, Any]] = {} if not os.path.exists(watch): print("[WARN] Watch path not found:", watch) @@ -228,26 +258,28 @@ def scan_latest(watch: str, header_bytes: int, mtime 
= e.stat().st_mtime except Exception: continue + cached = cache.get(fpath) if cached is not None and cached[0] == mtime: uid = cached[1] else: - uid = sniff_unit_from_mlg(fpath, header_bytes) - if not uid: + uid = sniff_unit_from_mlg(fpath, header_bytes) + if not uid: if (recent_cutoff is not None) and (mtime >= recent_cutoff): if logger: logger(f"[unsniffable-recent] {fpath}") - continue # skip file if no unit ID found in header - cache[fpath] = (mtime, uid) + continue # skip file if no unit ID found in header + cache[fpath] = (mtime, uid) + if (uid not in latest) or (mtime > latest[uid]["mtime"]): - latest[uid] = {"mtime": mtime, "fname": e.name} + latest[uid] = {"mtime": mtime, "fname": e.name, "path": fpath} except Exception as ex: print("[WARN] Scan error:", ex) return latest + # --- Roster fetch (Dropbox/HTTPS) helper --- -def refresh_roster_from_url(url: str, dest: str, min_seconds: int, - state: dict, logger=None): +def refresh_roster_from_url(url: str, dest: str, min_seconds: int, state: dict, logger=None) -> None: now = time.time() # throttle fetches; only pull if enough time elapsed @@ -262,9 +294,10 @@ def refresh_roster_from_url(url: str, dest: str, min_seconds: int, f.write(data) state["t"] = now if logger: - from datetime import datetime - logger(f"[roster] refreshed from {url} at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} " - f"-> {dest} ({len(data)} bytes)") + logger( + f"[roster] refreshed from {url} at " + f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} -> {dest} ({len(data)} bytes)" + ) except Exception as e: if logger: logger(f"[roster-fetch-error] {e}") @@ -274,19 +307,69 @@ def refresh_roster_from_url(url: str, dest: str, min_seconds: int, def cfg_get(cfg: dict, key: str, default=None): return cfg.get(key, cfg.get(key.lower(), cfg.get(key.upper(), default))) -#---Report to server --- -def report_to_server(server_url: str, uid: str, info: dict, status: str): - payload = { - "unit": uid, - "unit_type": "series3", - "timestamp": 
fmt_last(info["mtime"]), - "file": info["fname"], - "status": status - } + +# --- API heartbeat / SFM telemetry helpers --- +def send_api_payload(payload: dict, api_url: str) -> None: + if not api_url: + return + data = json.dumps(payload).encode("utf-8") + req = urllib.request.Request(api_url, data=data, headers={"Content-Type": "application/json"}) try: - requests.post(server_url, json=payload, timeout=5) - except Exception as e: - print(f"[WARN] report_to_server failed for {uid}: {e}") + with urllib.request.urlopen(req, timeout=5) as res: + print(f"[API] POST success: {res.status}") + except urllib.error.URLError as e: + print(f"[API] POST failed: {e}") + + +def build_sfm_payload(units_dict: Dict[str, Dict[str, Any]], cfg: Dict[str, Any]) -> dict: + """ + Build SFM Telemetry JSON v1 payload from latest-unit dict. + Schema is source-agnostic and future-proof. + """ + now_iso = datetime.now(timezone.utc).isoformat() + now_ts = time.time() + + payload = { + "source_id": cfg.get("SOURCE_ID", gethostname()), + "source_type": cfg.get("SOURCE_TYPE", "series3_emitter"), + "timestamp": now_iso, + "units": [], + } + + for unit_id, info in units_dict.items(): + mtime = info.get("mtime") + if mtime is not None: + last_event_iso = datetime.fromtimestamp(mtime, tz=timezone.utc).isoformat() + age_minutes = int(max(0, (now_ts - mtime) // 60)) + else: + last_event_iso = None + age_minutes = None + + file_path = info.get("path") + file_size = None + if file_path: + try: + file_size = os.path.getsize(file_path) + except Exception: + file_size = None + + payload["units"].append( + { + "unit_id": unit_id, + "last_event_time": last_event_iso, + "age_minutes": age_minutes, + "observation_method": "mlg_scan", + "event_metadata": { + "file_name": info.get("fname"), + "file_path": file_path, + "file_size_bytes": file_size, + "event_number": None, + "event_type": None, + }, + } + ) + + return payload # --------------- Main loop ------------------ @@ -305,25 +388,26 @@ def main() -> None: 
COLORIZE = bool(cfg["COLORIZE"]) MLG_HEADER_BYTES = int(cfg["MLG_HEADER_BYTES"]) - C_OK = ansi(COLORIZE, "\033[92m") + C_OK = ansi(COLORIZE, "\033[92m") C_PEN = ansi(COLORIZE, "\033[93m") C_MIS = ansi(COLORIZE, "\033[91m") C_UNX = ansi(COLORIZE, "\033[95m") C_RST = ansi(COLORIZE, "\033[0m") - - # --- Dropbox roster refresh (pull CSV to local cache) --- - roster_state = {} - url = str(cfg_get(cfg, "ROSTER_URL", "") or "") - # --- Dropbox roster refresh (pull CSV to local cache) --- - roster_state = {} + + # --- Dropbox roster refresh (pull CSV to local cache) --- + roster_state: Dict[str, Any] = {} url = str(cfg_get(cfg, "ROSTER_URL", "") or "") - # πŸ”Ž Patch 3: startup config echo (helps debugging) - print(f"[CFG] WATCH_PATH={WATCH_PATH} ROSTER_FILE={ROSTER_FILE} ROSTER_URL={'set' if url else 'not set'}") - # (optional, also write it to the log file) - log_message(LOG_FILE, ENABLE_LOGGING, - f"[cfg] WATCH_PATH={WATCH_PATH} ROSTER_FILE={ROSTER_FILE} ROSTER_URL={'set' if url else 'not set'}") - + print( + f"[CFG] WATCH_PATH={WATCH_PATH} ROSTER_FILE={ROSTER_FILE} " + f"ROSTER_URL={'set' if url else 'not set'}" + ) + log_message( + LOG_FILE, + ENABLE_LOGGING, + f"[cfg] WATCH_PATH={WATCH_PATH} ROSTER_FILE={ROSTER_FILE} ROSTER_URL={'set' if url else 'not set'}", + ) + if url.lower().startswith("http"): refresh_roster_from_url( url, @@ -336,28 +420,25 @@ def main() -> None: # cache for scanning sniff_cache: Dict[str, Tuple[float, str]] = {} - # Always load the (possibly refreshed) local roster + # Always load the (possibly refreshed) local roster try: active, bench, ignored, notes_by_unit = load_roster(ROSTER_FILE) except Exception as ex: log_message(LOG_FILE, ENABLE_LOGGING, f"[WARN] roster load failed: {ex}") - active = set() - bench = set() - ignored = set() - notes_by_unit = {} + active, bench, ignored, notes_by_unit = set(), set(), set(), {} - # track roster file modification time + # track roster file modification time try: roster_mtime = 
os.path.getmtime(ROSTER_FILE) except Exception: roster_mtime = None - + last_api_ts: float = 0.0 while True: try: now_local = datetime.now().isoformat() - now_utc = datetime.now(timezone.utc).isoformat() + now_utc = datetime.now(timezone.utc).isoformat() print("-" * 110) print("Heartbeat @ {} (Local) | {} (UTC)".format(now_local, now_utc)) print("-" * 110) @@ -379,29 +460,45 @@ def main() -> None: m = None if m is not None and m != roster_mtime: - roster_mtime = m - try: - new_active, new_bench, new_ignored, new_notes_by_unit = load_roster(ROSTER_FILE) - if new_active or new_bench or new_ignored: - active, bench, ignored, notes_by_unit = new_active, new_bench, new_ignored, new_notes_by_unit - print(f"[ROSTER] Reloaded: {len(active)} active unit(s) from {ROSTER_FILE}") - log_message(LOG_FILE, ENABLE_LOGGING, - f"[roster] reloaded {len(active)} active units") - else: - print("[ROSTER] Reload skipped β€” no valid active units in new file") - log_message(LOG_FILE, ENABLE_LOGGING, - "[roster] reload skipped β€” roster parse failed or empty") - except Exception as ex: - print(f"[ROSTER] Reload failed, keeping previous roster: {ex}") - log_message(LOG_FILE, ENABLE_LOGGING, - f"[roster] reload failed, keeping previous roster: {ex}") + roster_mtime = m + try: + new_active, new_bench, new_ignored, new_notes_by_unit = load_roster(ROSTER_FILE) + if new_active or new_bench or new_ignored: + active, bench, ignored, notes_by_unit = ( + new_active, + new_bench, + new_ignored, + new_notes_by_unit, + ) + print(f"[ROSTER] Reloaded: {len(active)} active unit(s) from {ROSTER_FILE}") + log_message( + LOG_FILE, + ENABLE_LOGGING, + f"[roster] reloaded {len(active)} active units", + ) + else: + print("[ROSTER] Reload skipped β€” no valid active units in new file") + log_message( + LOG_FILE, + ENABLE_LOGGING, + "[roster] reload skipped β€” roster parse failed or empty", + ) + except Exception as ex: + print(f"[ROSTER] Reload failed, keeping previous roster: {ex}") + log_message( + LOG_FILE, 
+ ENABLE_LOGGING, + f"[roster] reload failed, keeping previous roster: {ex}", + ) clear_logs_if_needed(LOG_FILE, ENABLE_LOGGING, LOG_RETENTION_DAYS) recent_cutoff = time.time() - (float(cfg.get("RECENT_WARN_DAYS", 30)) * 86400) - logger = lambda m: log_message(LOG_FILE, ENABLE_LOGGING, m) - latest = scan_latest(WATCH_PATH, MLG_HEADER_BYTES, sniff_cache, recent_cutoff, logger) + logger_fn = lambda m: log_message(LOG_FILE, ENABLE_LOGGING, m) + + latest = scan_latest(WATCH_PATH, MLG_HEADER_BYTES, sniff_cache, recent_cutoff, logger_fn) now_epoch = time.time() + # Active units for uid in sorted(active): info = latest.get(uid) if info is not None: @@ -412,20 +509,30 @@ def main() -> None: status, col = "Pending", C_PEN else: status, col = "OK", C_OK + note = notes_by_unit.get(uid, "") note_suffix = f" [{note}]" if note else "" - line = ("{col}{uid:<8} {status:<8} Age: {age:<7} Last: {last} (File: {fname}){note}{rst}" - .format(col=col, uid=uid, status=status, - age=fmt_age(now_epoch, info["mtime"]), - last=fmt_last(info["mtime"]), fname=info["fname"], note=note_suffix, rst=C_RST)) + line = ( + "{col}{uid:<8} {status:<8} Age: {age:<7} Last: {last} (File: {fname}){note}{rst}".format( + col=col, + uid=uid, + status=status, + age=fmt_age(now_epoch, info["mtime"]), + last=fmt_last(info["mtime"]), + fname=info["fname"], + note=note_suffix, + rst=C_RST, + ) + ) else: note = notes_by_unit.get(uid, "") note_suffix = f" [{note}]" if note else "" - line = "{col}{uid:<8} Missing Age: N/A Last: ---{note}{rst}".format(col=C_MIS, uid=uid, note=note_suffix, rst=C_RST) + line = "{col}{uid:<8} Missing Age: N/A Last: ---{note}{rst}".format( + col=C_MIS, uid=uid, note=note_suffix, rst=C_RST + ) + print(line) log_message(LOG_FILE, ENABLE_LOGGING, line) - if info is not None: - report_to_server(cfg["API_URL"], uid, info, status) # Bench Units (rostered but not active in field) print("\nBench Units (rostered, not active):") @@ -434,36 +541,46 @@ def main() -> None: note = 
notes_by_unit.get(uid, "") note_suffix = f" [{note}]" if note else "" if info: - line = (f"{uid:<8} Bench Last: {fmt_last(info['mtime'])} (File: {info['fname']}){note_suffix}") + line = ( + f"{uid:<8} Bench Last: {fmt_last(info['mtime'])} " + f"(File: {info['fname']}){note_suffix}" + ) else: - line = (f"{uid:<8} Bench Last: ---{note_suffix}") + line = f"{uid:<8} Bench Last: ---{note_suffix}" print(line) log_message(LOG_FILE, ENABLE_LOGGING, "[bench] " + line) - # Ignored Units (retired, broken, or do-not-care) -# ignored_detected = [u for u in latest.keys() if u in ignored] -# if ignored_detected: -# print("\nIgnored Units:") -# for uid in sorted(ignored_detected): -# info = latest[uid] -# note = notes_by_unit.get(uid, "") -# note_suffix = f" [{note}]" if note else "" -# line = (f"{uid:<8} Ignored Last: {fmt_last(info['mtime'])} (File: {info['fname']}){note_suffix}") -# print(line) -# log_message(LOG_FILE, ENABLE_LOGGING, "[ignored] " + line) + # Unexpected units unexpected = [ - u for u in latest.keys() + u + for u in latest.keys() if u not in active and u not in bench and u not in ignored and u not in notes_by_unit ] if unexpected: print("\nUnexpected Units Detected:") for uid in sorted(unexpected): info = latest[uid] - line = ("{col}{uid:<8} Age: - Last: {last} (File: {fname}){rst}" - .format(col=C_UNX, uid=uid, last=fmt_last(info["mtime"]), fname=info["fname"], rst=C_RST)) + line = ( + "{col}{uid:<8} Age: - Last: {last} (File: {fname}){rst}".format( + col=C_UNX, + uid=uid, + last=fmt_last(info["mtime"]), + fname=info["fname"], + rst=C_RST, + ) + ) print(line) log_message(LOG_FILE, ENABLE_LOGGING, "[unexpected] " + line) + # ---- API heartbeat to SFM ---- + if cfg.get("API_ENABLED", False): + now_ts = time.time() + interval = int(cfg.get("API_INTERVAL_SECONDS", 300)) + if now_ts - last_api_ts >= interval: + payload = build_sfm_payload(latest, cfg) + send_api_payload(payload, cfg.get("API_URL", "")) + last_api_ts = now_ts + except KeyboardInterrupt: 
print("\nStopping...") break @@ -471,7 +588,9 @@ def main() -> None: err = "[loop-error] {}".format(e) print(err) log_message(LOG_FILE, ENABLE_LOGGING, err) + time.sleep(SCAN_INTERVAL) - + + if __name__ == "__main__": main() From 9074277ff33ba227a9d5c15b70aa3055585a55c9 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Tue, 2 Dec 2025 01:31:37 -0500 Subject: [PATCH 04/11] config update --- config.ini | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/config.ini b/config.ini index e048367..b5f1497 100644 --- a/config.ini +++ b/config.ini @@ -1,4 +1,12 @@ [emitter] + +# --- API Heartbeat Settings --- +API_ENABLED = true +API_URL = http://10.0.0.40:8001/api/series3/heartbeat +API_INTERVAL_SECONDS = 300 +SOURCE_ID = dl2-series3 +SOURCE_TYPE = series3_emitter + # Paths SERIES3_PATH = C:\Blastware 10\Event\autocall home ROSTER_FILE = C:\SeismoEmitter\series3_roster.csv From 73204ee92e97333e90f9d8531bc0137e0295b940 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 4 Dec 2025 16:22:31 -0500 Subject: [PATCH 05/11] Roster deprecated --- series3_emitter.py | 310 +++++++++------------------------------------ 1 file changed, 57 insertions(+), 253 deletions(-) diff --git a/series3_emitter.py b/series3_emitter.py index 50b06c3..e8218e2 100644 --- a/series3_emitter.py +++ b/series3_emitter.py @@ -1,36 +1,34 @@ """ -Series 3 Emitter β€” v1.1.0 +Series 3 Emitter β€” v1.2.0 Environment: - Python 3.8 (Windows 7 compatible) - Runs on DL2 with Blastware 10 event path Key Features: -- Atomic roster downloads from Dropbox (no partial files) -- Automatic roster refresh from Dropbox at configurable interval -- Automatic hot-reload into memory when roster CSV changes -- Failsafe reload: keeps previous roster if new file is invalid or empty - Config-driven paths, intervals, and logging - Compact console heartbeat with status per unit - Logging with retention auto-clean (days configurable) - Safe .MLG header sniff for unit IDs (BE#### / BA####) -- NEW in v1.1.0: - - Standardized SFM 
Telemetry JSON payload (source-agnostic) - - Periodic HTTP heartbeat POST to SFM backend +- Standardized SFM Telemetry JSON payload (source-agnostic) +- Periodic HTTP heartbeat POST to SFM backend +- NEW in v1.2.0: + - No local roster / CSV dependency + - Only scans .MLG files newer than MAX_EVENT_AGE_DAYS """ import os import re -import csv import time import json import configparser import urllib.request import urllib.error from datetime import datetime, timezone, timedelta -from typing import Dict, Any, Optional, Tuple, Set, List +from typing import Dict, Any, Optional, Tuple from socket import gethostname + # ---------------- Config ---------------- def load_config(path: str) -> Dict[str, Any]: """Load INI with tolerant inline comments and a required [emitter] section.""" @@ -61,9 +59,6 @@ def load_config(path: str) -> Dict[str, Any]: return { "WATCH_PATH": get_str("SERIES3_PATH", r"C:\Blastware 10\Event\autocall home"), - "ROSTER_FILE": get_str("ROSTER_FILE", r"C:\SeismoEmitter\series3_roster.csv"), - "ROSTER_URL": get_str("ROSTER_URL", ""), - "ROSTER_REFRESH_MIN_SECONDS": get_int("ROSTER_REFRESH_MIN_SECONDS", 300), "SCAN_INTERVAL": get_int("SCAN_INTERVAL_SECONDS", 300), "OK_HOURS": float(get_int("OK_HOURS", 12)), "MISSING_HOURS": float(get_int("MISSING_HOURS", 24)), @@ -73,6 +68,7 @@ def load_config(path: str) -> Dict[str, Any]: "COLORIZE": get_bool("COLORIZE", False), # Win7 default off "MLG_HEADER_BYTES": max(256, min(get_int("MLG_HEADER_BYTES", 2048), 65536)), "RECENT_WARN_DAYS": get_int("RECENT_WARN_DAYS", 30), + "MAX_EVENT_AGE_DAYS": get_int("MAX_EVENT_AGE_DAYS", 365), # API heartbeat / SFM telemetry "API_ENABLED": get_bool("API_ENABLED", False), @@ -99,6 +95,7 @@ def log_message(path: str, enabled: bool, msg: str) -> None: with open(path, "a", encoding="utf-8") as f: f.write("{} {}\n".format(datetime.now(timezone.utc).isoformat(), msg)) except Exception: + # Logging must never crash the emitter pass @@ -126,79 +123,6 @@ def 
clear_logs_if_needed(log_file: str, enabled: bool, retention_days: int) -> N pass -# --------------- Roster --------------------- -def normalize_id(uid: str) -> str: - if uid is None: - return "" - return uid.replace(" ", "").strip().upper() - - -def load_roster(path: str) -> Tuple[Set[str], Set[str], Set[str], Dict[str, str]]: - """ - CSV tolerant of commas in notes: device_id, active, notes... - Returns: active, bench, ignored, notes_by_unit - """ - active: Set[str] = set() - bench: Set[str] = set() - ignored: Set[str] = set() - notes_by_unit: Dict[str, str] = {} - - if not os.path.exists(path): - print("[WARN] Roster not found:", path) - return active, bench, ignored, notes_by_unit - - try: - with open(path, "r", encoding="utf-8-sig", newline="") as f: - rdr = csv.reader(f) - try: - headers = next(rdr) - except StopIteration: - return active, bench, ignored, notes_by_unit - - headers = [(h or "").strip().lower() for h in headers] - - def idx_of(name: str, fallbacks: List[str]) -> Optional[int]: - if name in headers: - return headers.index(name) - for fb in fallbacks: - if fb in headers: - return headers.index(fb) - return None - - i_id = idx_of("device_id", ["unitid", "id"]) - i_ac = idx_of("active", []) - i_no = idx_of("notes", ["note", "location"]) - - if i_id is None or i_ac is None: - print("[WARN] Roster missing device_id/active columns") - return active, bench, ignored, notes_by_unit - - for row in rdr: - if len(row) <= max(i_id, i_ac): - continue - uid = normalize_id(row[i_id]) - note = "" - if i_no is not None: - extra = row[i_no:] - note = ",".join([c or "" for c in extra]).strip().rstrip(",") - if not uid: - continue - notes_by_unit[uid] = note - - flag = (row[i_ac] or "").strip().lower() - if flag in ("yes", "y", "true", "1", "on"): - active.add(uid) - elif flag in ("no", "n", "off", "0"): - bench.add(uid) - elif flag in ("ignore", "retired", "old"): - ignored.add(uid) - - except Exception as e: - print("[WARN] Roster read error:", e) - - return 
active, bench, ignored, notes_by_unit - - # --------------- .MLG sniff ------------------ UNIT_BYTES_RE = re.compile(rb"(?:^|[^A-Z])(BE|BA)\d{4,5}(?:[^0-9]|$)") @@ -237,17 +161,23 @@ def scan_latest( watch: str, header_bytes: int, cache: Dict[str, Tuple[float, str]], - recent_cutoff: float = None, + recent_cutoff: float, + max_age_days: int, logger=None, ) -> Dict[str, Dict[str, Any]]: """ - Return newest .MLG per unit: + Return newest .MLG per unit, only for files newer than max_age_days: {uid: {'mtime': float, 'fname': str, 'path': str}} """ latest: Dict[str, Dict[str, Any]] = {} if not os.path.exists(watch): print("[WARN] Watch path not found:", watch) return latest + + now_ts = time.time() + max_age_days = max(1, int(max_age_days)) # sanity floor + max_age_seconds = max_age_days * 86400.0 + try: with os.scandir(watch) as it: for e in it: @@ -259,12 +189,20 @@ def scan_latest( except Exception: continue + # Skip very old events (beyond retention window) + age_seconds = now_ts - mtime + if age_seconds < 0: + age_seconds = 0 + if age_seconds > max_age_seconds: + continue # too old, ignore this file + cached = cache.get(fpath) if cached is not None and cached[0] == mtime: uid = cached[1] else: uid = sniff_unit_from_mlg(fpath, header_bytes) if not uid: + # If unsniffable but very recent, log for later inspection if (recent_cutoff is not None) and (mtime >= recent_cutoff): if logger: logger(f"[unsniffable-recent] {fpath}") @@ -278,36 +216,6 @@ def scan_latest( return latest -# --- Roster fetch (Dropbox/HTTPS) helper --- -def refresh_roster_from_url(url: str, dest: str, min_seconds: int, state: dict, logger=None) -> None: - now = time.time() - - # throttle fetches; only pull if enough time elapsed - if now - state.get("t", 0) < max(0, int(min_seconds or 0)): - return - - try: - with urllib.request.urlopen(url, timeout=15) as r: - data = r.read() - if data and data.strip(): - with open(dest, "wb") as f: - f.write(data) - state["t"] = now - if logger: - logger( - 
f"[roster] refreshed from {url} at " - f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} -> {dest} ({len(data)} bytes)" - ) - except Exception as e: - if logger: - logger(f"[roster-fetch-error] {e}") - - -# --- config helper: case-insensitive key lookup --- -def cfg_get(cfg: dict, key: str, default=None): - return cfg.get(key, cfg.get(key.lower(), cfg.get(key.upper(), default))) - - # --- API heartbeat / SFM telemetry helpers --- def send_api_payload(payload: dict, api_url: str) -> None: if not api_url: @@ -378,7 +286,6 @@ def main() -> None: cfg = load_config(os.path.join(here, "config.ini")) WATCH_PATH = cfg["WATCH_PATH"] - ROSTER_FILE = cfg["ROSTER_FILE"] SCAN_INTERVAL = int(cfg["SCAN_INTERVAL"]) OK_HOURS = float(cfg["OK_HOURS"]) MISSING_HOURS = float(cfg["MISSING_HOURS"]) @@ -387,52 +294,30 @@ def main() -> None: LOG_RETENTION_DAYS = int(cfg["LOG_RETENTION_DAYS"]) COLORIZE = bool(cfg["COLORIZE"]) MLG_HEADER_BYTES = int(cfg["MLG_HEADER_BYTES"]) + RECENT_WARN_DAYS = int(cfg["RECENT_WARN_DAYS"]) + MAX_EVENT_AGE_DAYS = int(cfg["MAX_EVENT_AGE_DAYS"]) C_OK = ansi(COLORIZE, "\033[92m") C_PEN = ansi(COLORIZE, "\033[93m") C_MIS = ansi(COLORIZE, "\033[91m") - C_UNX = ansi(COLORIZE, "\033[95m") C_RST = ansi(COLORIZE, "\033[0m") - # --- Dropbox roster refresh (pull CSV to local cache) --- - roster_state: Dict[str, Any] = {} - url = str(cfg_get(cfg, "ROSTER_URL", "") or "") - print( - f"[CFG] WATCH_PATH={WATCH_PATH} ROSTER_FILE={ROSTER_FILE} " - f"ROSTER_URL={'set' if url else 'not set'}" + "[CFG] WATCH_PATH={} SCAN_INTERVAL={}s MAX_EVENT_AGE_DAYS={} API_ENABLED={}".format( + WATCH_PATH, SCAN_INTERVAL, MAX_EVENT_AGE_DAYS, bool(cfg.get("API_ENABLED", False)) + ) ) log_message( LOG_FILE, ENABLE_LOGGING, - f"[cfg] WATCH_PATH={WATCH_PATH} ROSTER_FILE={ROSTER_FILE} ROSTER_URL={'set' if url else 'not set'}", + "[cfg] WATCH_PATH={} SCAN_INTERVAL={} MAX_EVENT_AGE_DAYS={} API_ENABLED={}".format( + WATCH_PATH, SCAN_INTERVAL, MAX_EVENT_AGE_DAYS, bool(cfg.get("API_ENABLED", False)) + ), 
) - if url.lower().startswith("http"): - refresh_roster_from_url( - url, - ROSTER_FILE, - int(cfg_get(cfg, "ROSTER_REFRESH_MIN_SECONDS", 300)), - roster_state, - lambda m: log_message(LOG_FILE, ENABLE_LOGGING, m), - ) - # cache for scanning sniff_cache: Dict[str, Tuple[float, str]] = {} - # Always load the (possibly refreshed) local roster - try: - active, bench, ignored, notes_by_unit = load_roster(ROSTER_FILE) - except Exception as ex: - log_message(LOG_FILE, ENABLE_LOGGING, f"[WARN] roster load failed: {ex}") - active, bench, ignored, notes_by_unit = set(), set(), set(), {} - - # track roster file modification time - try: - roster_mtime = os.path.getmtime(ROSTER_FILE) - except Exception: - roster_mtime = None - last_api_ts: float = 0.0 while True: @@ -443,65 +328,25 @@ def main() -> None: print("Heartbeat @ {} (Local) | {} (UTC)".format(now_local, now_utc)) print("-" * 110) - # Periodically refresh roster file from Dropbox - if url.lower().startswith("http"): - refresh_roster_from_url( - url, - ROSTER_FILE, - int(cfg_get(cfg, "ROSTER_REFRESH_MIN_SECONDS", 300)), - roster_state, - lambda m: log_message(LOG_FILE, ENABLE_LOGGING, m), - ) - - # Reload roster into memory if the file changed - try: - m = os.path.getmtime(ROSTER_FILE) - except Exception: - m = None - - if m is not None and m != roster_mtime: - roster_mtime = m - try: - new_active, new_bench, new_ignored, new_notes_by_unit = load_roster(ROSTER_FILE) - if new_active or new_bench or new_ignored: - active, bench, ignored, notes_by_unit = ( - new_active, - new_bench, - new_ignored, - new_notes_by_unit, - ) - print(f"[ROSTER] Reloaded: {len(active)} active unit(s) from {ROSTER_FILE}") - log_message( - LOG_FILE, - ENABLE_LOGGING, - f"[roster] reloaded {len(active)} active units", - ) - else: - print("[ROSTER] Reload skipped β€” no valid active units in new file") - log_message( - LOG_FILE, - ENABLE_LOGGING, - "[roster] reload skipped β€” roster parse failed or empty", - ) - except Exception as ex: - 
print(f"[ROSTER] Reload failed, keeping previous roster: {ex}") - log_message( - LOG_FILE, - ENABLE_LOGGING, - f"[roster] reload failed, keeping previous roster: {ex}", - ) - clear_logs_if_needed(LOG_FILE, ENABLE_LOGGING, LOG_RETENTION_DAYS) - recent_cutoff = time.time() - (float(cfg.get("RECENT_WARN_DAYS", 30)) * 86400) + recent_cutoff = time.time() - (float(RECENT_WARN_DAYS) * 86400) logger_fn = lambda m: log_message(LOG_FILE, ENABLE_LOGGING, m) - latest = scan_latest(WATCH_PATH, MLG_HEADER_BYTES, sniff_cache, recent_cutoff, logger_fn) + latest = scan_latest( + WATCH_PATH, + MLG_HEADER_BYTES, + sniff_cache, + recent_cutoff, + MAX_EVENT_AGE_DAYS, + logger_fn, + ) now_epoch = time.time() - # Active units - for uid in sorted(active): - info = latest.get(uid) - if info is not None: + # Detected units summary (no roster dependency) + if latest: + print("\nDetected Units (within last {} days):".format(MAX_EVENT_AGE_DAYS)) + for uid in sorted(latest.keys()): + info = latest[uid] age_hours = (now_epoch - info["mtime"]) / 3600.0 if age_hours > MISSING_HOURS: status, col = "Missing", C_MIS @@ -510,67 +355,26 @@ def main() -> None: else: status, col = "OK", C_OK - note = notes_by_unit.get(uid, "") - note_suffix = f" [{note}]" if note else "" line = ( - "{col}{uid:<8} {status:<8} Age: {age:<7} Last: {last} (File: {fname}){note}{rst}".format( + "{col}{uid:<8} {status:<8} Age: {age:<7} Last: {last} (File: {fname}){rst}".format( col=col, uid=uid, status=status, age=fmt_age(now_epoch, info["mtime"]), last=fmt_last(info["mtime"]), fname=info["fname"], - note=note_suffix, - rst=C_RST, - ) - ) - else: - note = notes_by_unit.get(uid, "") - note_suffix = f" [{note}]" if note else "" - line = "{col}{uid:<8} Missing Age: N/A Last: ---{note}{rst}".format( - col=C_MIS, uid=uid, note=note_suffix, rst=C_RST - ) - - print(line) - log_message(LOG_FILE, ENABLE_LOGGING, line) - - # Bench Units (rostered but not active in field) - print("\nBench Units (rostered, not active):") - for uid in 
sorted(bench): - info = latest.get(uid) - note = notes_by_unit.get(uid, "") - note_suffix = f" [{note}]" if note else "" - if info: - line = ( - f"{uid:<8} Bench Last: {fmt_last(info['mtime'])} " - f"(File: {info['fname']}){note_suffix}" - ) - else: - line = f"{uid:<8} Bench Last: ---{note_suffix}" - print(line) - log_message(LOG_FILE, ENABLE_LOGGING, "[bench] " + line) - - # Unexpected units - unexpected = [ - u - for u in latest.keys() - if u not in active and u not in bench and u not in ignored and u not in notes_by_unit - ] - if unexpected: - print("\nUnexpected Units Detected:") - for uid in sorted(unexpected): - info = latest[uid] - line = ( - "{col}{uid:<8} Age: - Last: {last} (File: {fname}){rst}".format( - col=C_UNX, - uid=uid, - last=fmt_last(info["mtime"]), - fname=info["fname"], rst=C_RST, ) ) print(line) - log_message(LOG_FILE, ENABLE_LOGGING, "[unexpected] " + line) + log_message(LOG_FILE, ENABLE_LOGGING, line) + else: + print("\nNo recent .MLG activity found within last {} days.".format(MAX_EVENT_AGE_DAYS)) + log_message( + LOG_FILE, + ENABLE_LOGGING, + "[info] no recent MLG activity within {} days".format(MAX_EVENT_AGE_DAYS), + ) # ---- API heartbeat to SFM ---- if cfg.get("API_ENABLED", False): From f29943f8e4100913c83522ac997e28f41fa8cb56 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 4 Dec 2025 17:16:59 -0500 Subject: [PATCH 06/11] Add version information to README --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 290f1f5..be23788 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +Series3-Emitter v1.2 + A lightweight Python script that monitors Instantel **Series 3 (Minimate)** call-in activity on a Blastware server. It scans the event folder, reads `.MLG` headers to identify unit IDs, and prints a live status table showing: @@ -76,4 +78,4 @@ See `CHANGELOG.md` for details. ## License Private / internal project. 
-``` \ No newline at end of file +``` From 62a4ca2b1c5651b1493e77e4aae92290f9d4e819 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 4 Dec 2025 17:22:30 -0500 Subject: [PATCH 07/11] Update README header formatting --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index be23788..247013d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -Series3-Emitter v1.2 +# Series3-Emitter v1.2 A lightweight Python script that monitors Instantel **Series 3 (Minimate)** call-in activity on a Blastware server. From 58ba506f5470f99fd6cbe5549606b363ceded7dd Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 4 Dec 2025 17:24:20 -0500 Subject: [PATCH 08/11] docs updated --- CHANGELOG.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++------ README.md | 37 +++++++++++++++++++----------------- config.ini | 4 +--- 3 files changed, 68 insertions(+), 26 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d7981a..1abd290 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,27 +10,68 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 --- -## [1.0.0] – 2025-09-02 +## [1.2.0] - 2025-12-04 + +### Changed +- Removed roster CSV dependency and all Dropbox refresh/hot-reload logic; heartbeat now only enumerates `.MLG` files. +- Added `MAX_EVENT_AGE_DAYS` filter to ignore stale events and log when no recent activity exists. +- Simplified heartbeat output/logging to show detected units only; logging hardened to never crash the emitter. + +--- + +## [1.1.1] - 2025-12-02 + +### Added +- Example `config.ini` now ships with API heartbeat settings enabled (`API_ENABLED`, `API_URL`, `API_INTERVAL_SECONDS`, `SOURCE_ID`, `SOURCE_TYPE`). + +--- + +## [1.1.0] - 2025-12-01 + +### Added +- Standardized SFM telemetry payload builder and periodic HTTP heartbeat POST via `urllib`. 
+- Config support for API heartbeat (`API_ENABLED`, `API_URL`, `API_INTERVAL_SECONDS`, `SOURCE_ID`, `SOURCE_TYPE`); payload includes file path/size metadata. + +### Changed +- Refactored scanner to retain file paths and header sniff cache; reformatted logging/ANSI handling. + +--- + +## [1.0.1] - 2025-11-20 + +### Added +- `API_URL` config key and `report_to_server` per-unit POST hook (adds `requests` dependency). + +### Changed +- Example `config.ini` roster URL updated; merged into `main`. + +--- + +## [1.0.0] - 2025-11-17 ### Added - **Automatic roster refresh** from Dropbox at a configurable interval (`ROSTER_REFRESH_MIN_SECONDS`). - **Hot-reload** of roster file without restarting the script. - **Failsafe reload:** if the new roster is missing or invalid, the previous good roster is retained. -- **Atomic roster downloads** (temp file → replace) to avoid partial/corrupted CSVs. +- **Atomic roster downloads** (temp file in-place replace) to avoid partial/corrupted CSVs. - **Startup config echo** printing WATCH_PATH, ROSTER_FILE, and ROSTER_URL visibility. - **Active / Bench / Ignored** unit categories for clearer fleet status mapping. ### Fixed -- Removed stray `note=note_suffix` bug in the “Unexpected Units” section. +- Removed stray `note=note_suffix` bug in the Unexpected Units section. - Removed duplicate `import time`. - Removed duplicate roster load during startup (roster now loads once). - Cleaned indentation for Python 3.8 compatibility. ### Changed -- Reset versioning from legacy `v5.9 beta` → **v1.0.0** (clean semver baseline). +- Reset versioning from legacy `v5.9 beta` to **v1.0.0** (clean semver baseline). - Main script normalized as `series3_emitter.py`. 
--- -[Unreleased]: https://example.com/compare/v1.0.0...HEAD -[1.0.0]: https://example.com/releases/v1.0.0 \ No newline at end of file +[Unreleased]: https://example.com/compare/v1.2.0...HEAD +[1.2.0]: https://example.com/releases/v1.2.0 +[1.1.1]: https://example.com/releases/v1.1.1 +[1.1.0]: https://example.com/releases/v1.1.0 +[1.0.1]: https://example.com/releases/v1.0.1 +[1.0.0]: https://example.com/releases/v1.0.0 diff --git a/README.md b/README.md index 290f1f5..401eaa2 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,8 @@ It scans the event folder, reads `.MLG` headers to identify unit IDs, and prints - Last event received - Age since last call-in - OK / Pending / Missing states -- Bench and ignored units -- Unexpected units -- Notes from the roster file +- Detected units (no roster required) +- Optional API heartbeat to Seismograph Fleet Manager backend This script is part of the larger **Seismograph Fleet Manager** project. @@ -17,7 +16,6 @@ This script is part of the larger **Seismograph Fleet Manager** project. - Python 3.8 (Windows 7 compatible) - Blastware 10 event folder available locally -- `series3_roster.csv` in the configured path - `config.ini` in the same directory as the script Install dependencies with: @@ -34,12 +32,11 @@ Run the emitter from the folder containing the script: The script will: -1. Load the roster file -2. Scan the Blastware event folder for `.MLG` files -3. Sniff each file header for the unit ID -4. Print a status line for each active unit -5. Refresh the roster automatically if `ROSTER_URL` is set -6. Write logs into the `emitter_logs/` folder +1. Scan the Blastware event folder for `.MLG` files (within a max age window). +2. Sniff each file header for the unit ID. +3. Print a status line for each detected unit (OK / Pending / Missing). +4. Optionally POST a heartbeat payload on an interval when `API_ENABLED=true`. +5. Write logs into the `emitter_logs/` folder and auto-clean old logs. 
--- @@ -49,11 +46,18 @@ All settings are stored in `config.ini`. Key fields: -- `SERIES3_PATH` – folder containing `.MLG` files -- `ROSTER_FILE` – path to the local roster CSV -- `ROSTER_URL` – optional URL for automatic roster downloads -- `SCAN_INTERVAL_SECONDS` – how often to scan -- `OK_HOURS` / `MISSING_HOURS` – thresholds for status +- `SERIES3_PATH` — folder containing `.MLG` files +- `SCAN_INTERVAL_SECONDS` — how often to scan +- `OK_HOURS` / `MISSING_HOURS` — thresholds for status +- `MLG_HEADER_BYTES` — how many bytes to sniff from each `.MLG` header +- `RECENT_WARN_DAYS` — log unsniffable files newer than this window +- `MAX_EVENT_AGE_DAYS` — ignore events older than this many days +- `API_ENABLED` — enable/disable heartbeat POST +- `API_URL` — heartbeat endpoint +- `API_INTERVAL_SECONDS` — heartbeat frequency +- `SOURCE_ID` / `SOURCE_TYPE` — identifiers included in the API payload +- `LOG_RETENTION_DAYS` — auto-delete logs older than this many days +- `COLORIZE` — ANSI color output (off by default for Win7) --- @@ -68,7 +72,7 @@ Git ignores all log files but keeps the folder itself. This repo follows **Semantic Versioning (SemVer)**. -Current release: **v1.0.0** – stable baseline emitter. +Current release: **v1.2.0** — rosterless scanning with API heartbeat option. See `CHANGELOG.md` for details. --- @@ -76,4 +80,3 @@ See `CHANGELOG.md` for details. ## License Private / internal project. 
-``` \ No newline at end of file diff --git a/config.ini b/config.ini index b5f1497..3037099 100644 --- a/config.ini +++ b/config.ini @@ -9,9 +9,7 @@ SOURCE_TYPE = series3_emitter # Paths SERIES3_PATH = C:\Blastware 10\Event\autocall home -ROSTER_FILE = C:\SeismoEmitter\series3_roster.csv -ROSTER_URL = https://www.dropbox.com/URL -ROSTER_REFRESH_MIN_SECONDS = 0 +MAX_EVENT_AGE_DAYS = 365 # Scanning From 44476248c3448381932f182c29aefb410da51ee4 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Tue, 3 Mar 2026 16:09:39 -0500 Subject: [PATCH 09/11] chore: config.ini now added to git ignore. See config template for schema --- config.ini | 34 ---------------------------------- 1 file changed, 34 deletions(-) delete mode 100644 config.ini diff --git a/config.ini b/config.ini deleted file mode 100644 index 3037099..0000000 --- a/config.ini +++ /dev/null @@ -1,34 +0,0 @@ -[emitter] - -# --- API Heartbeat Settings --- -API_ENABLED = true -API_URL = http://10.0.0.40:8001/api/series3/heartbeat -API_INTERVAL_SECONDS = 300 -SOURCE_ID = dl2-series3 -SOURCE_TYPE = series3_emitter - -# Paths -SERIES3_PATH = C:\Blastware 10\Event\autocall home -MAX_EVENT_AGE_DAYS = 365 - - -# Scanning -SCAN_INTERVAL_SECONDS = 30 -OK_HOURS = 12 -MISSING_HOURS = 24 - -# Logging -ENABLE_LOGGING = True -LOG_FILE = C:\SeismoEmitter\emitter_logs\series3_emitter.log -LOG_RETENTION_DAYS = 30 - -# Console colors -COLORIZE = FALSE - -# .MLG parsing -MLG_HEADER_BYTES = 2048 ; used for unit-id extraction - -# Deep sniff -DEEP_SNIFF = True ; toggle deep sniff on/off -SNIFF_BYTES = 65536 ; max bytes to scan for Notes/Cal - From 0d5fa7677f1569ea1c84892264af077bc0dc2f6b Mon Sep 17 00:00:00 2001 From: serversdwn Date: Tue, 3 Mar 2026 16:13:01 -0500 Subject: [PATCH 10/11] chore: Config-template.ini added to repo. .gitignore updated. 
--- .gitignore | 2 ++ config-template.ini | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 config-template.ini diff --git a/.gitignore b/.gitignore index 175dd94..4ec0bd9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +config.ini + # ------------------------- # Python ignores # ------------------------- diff --git a/config-template.ini b/config-template.ini new file mode 100644 index 0000000..72aad8c --- /dev/null +++ b/config-template.ini @@ -0,0 +1,34 @@ +[emitter] + +# --- API Heartbeat Settings --- +API_ENABLED = true +API_URL = +API_INTERVAL_SECONDS = 300 +SOURCE_ID = #computer that is running agent. +SOURCE_TYPE = series3_agent + +# Paths +SERIES3_PATH = C:\Blastware 10\Event\autocall home +MAX_EVENT_AGE_DAYS = 365 + + +# Scanning +SCAN_INTERVAL_SECONDS = 30 +OK_HOURS = 12 +MISSING_HOURS = 24 + +# Logging +ENABLE_LOGGING = True +LOG_FILE = C:\SeismoEmitter\emitter_logs\series3_emitter.log +LOG_RETENTION_DAYS = 30 + +# Console colors - (Doesn't work on windows 7) +COLORIZE = FALSE + +# .MLG parsing +MLG_HEADER_BYTES = 2048 ; used for unit-id extraction + +# Deep sniff +DEEP_SNIFF = True ; toggle deep sniff on/off +SNIFF_BYTES = 65536 ; max bytes to scan for Notes/Cal + From d404bf654235055e2b7513a527377a6d3ee67715 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Tue, 3 Mar 2026 17:10:47 -0500 Subject: [PATCH 11/11] refactor: Rename emitter to agent, update related files and logging paths --- .gitignore | 4 ++-- CHANGELOG.md | 14 ++++++++++---- README.md | 12 ++++++------ README_DL2.md | 8 ++++---- config-template.ini | 4 ++-- series3_emitter.py => series3_agent.py | 16 ++++++++-------- 6 files changed, 32 insertions(+), 26 deletions(-) rename series3_emitter.py => series3_agent.py (96%) diff --git a/.gitignore b/.gitignore index 4ec0bd9..6e3e0a3 100644 --- a/.gitignore +++ b/.gitignore @@ -24,8 +24,8 @@ dist/ # ------------------------- # Logs + runtime artifacts # ------------------------- -emitter_logs/* 
-!emitter_logs/.gitkeep # keep the folder but ignore its contents +agent_logs/* +!agent_logs/.gitkeep # keep the folder but ignore its contents *.log diff --git a/CHANGELOG.md b/CHANGELOG.md index 1abd290..b93df6a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,18 @@ # Changelog -All notable changes to **Series3 Emitter** will be documented in this file. +All notable changes to **Series3 Agent** will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). --- -## [Unreleased] +## [1.2.1] - 2026-03-03 + +### Changed +- Changed the name of the program to "series3-agent", this was done to align with the s4/thor agent and because it represents the program's functionality better. +- All instances of "emitter" changed to agent. +- config.ini added to .gitignore, replaced with a template example file. +- README.md updated to reflect changes. --- @@ -15,7 +21,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Removed roster CSV dependency and all Dropbox refresh/hot-reload logic; heartbeat now only enumerates `.MLG` files. - Added `MAX_EVENT_AGE_DAYS` filter to ignore stale events and log when no recent activity exists. -- Simplified heartbeat output/logging to show detected units only; logging hardened to never crash the emitter. +- Simplified heartbeat output/logging to show detected units only; logging hardened to never crash the agent. --- @@ -65,7 +71,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Reset versioning from legacy `v5.9 beta` to **v1.0.0** (clean semver baseline). -- Main script normalized as `series3_emitter.py`. +- Main script normalized as `series3_emitter.py` (later renamed to `series3_agent.py` in v1.2.1). 
--- diff --git a/README.md b/README.md index b066e37..69b6a90 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Series3-Emitter v1.2 +# Series3 Ingest Agent v1.2 A lightweight Python script that monitors Instantel **Series 3 (Minimate)** call-in activity on a Blastware server. @@ -28,9 +28,9 @@ Install dependencies with: ## Usage -Run the emitter from the folder containing the script: +Run the agent from the folder containing the script: -`python series3_emitter.py` +`python series3_agent.py` The script will: @@ -38,7 +38,7 @@ The script will: 2. Sniff each file header for the unit ID. 3. Print a status line for each detected unit (OK / Pending / Missing). 4. Optionally POST a heartbeat payload on an interval when `API_ENABLED=true`. -5. Write logs into the `emitter_logs/` folder and auto-clean old logs. +5. Write logs into the `agent_logs/` folder and auto-clean old logs. --- @@ -65,7 +65,7 @@ Key fields: ## Logs -Logs are stored under `emitter_logs/`. +Logs are stored under `agent_logs/`. Git ignores all log files but keeps the folder itself. --- @@ -74,7 +74,7 @@ Git ignores all log files but keeps the folder itself. This repo follows **Semantic Versioning (SemVer)**. -Current release: **v1.2.0** — rosterless scanning with API heartbeat option. +Current release: **v1.2.1** — renamed to series3 ingest agent. See `CHANGELOG.md` for details. 
--- diff --git a/README_DL2.md b/README_DL2.md index 9096a87..43418e4 100644 --- a/README_DL2.md +++ b/README_DL2.md @@ -1,10 +1,10 @@ -# Series 3 Emitter β€” v1_0(py38-safe) for DL2 +# Series 3 Ingest Agent β€” v1_0(py38-safe) for DL2 **Target**: Windows 7 + Python 3.8.10 **Baseline**: v5_4 (no logic changes) ## Files -- series3_emitter_v1_0_py38.py β€” main script (py38-safe) +- series3_agent_v1_0_py38.py β€” main script (py38-safe) - config.ini β€” your config (already included) - series3_roster.csv β€” your roster (already included, this auto updates from a URL to a dropbox file) - requirements.txt β€” none beyond stdlib @@ -15,7 +15,7 @@ 3) Open CMD: ```cmd cd C:\SeismoEmitter - python series3_emitter_v1_0_py38.py + python series3_agent_v1_0_py38.py ``` (If the console shows escape codes on Win7, set `COLORIZE = False` in `config.ini`.) @@ -23,4 +23,4 @@ - Heartbeat prints Local/UTC timestamps - One line per active roster unit with OK/Pending/Missing, Age, Last, File - Unexpected units block shows .MLG not in roster -- emitter.log rotates per LOG_RETENTION_DAYS +- agent.log rotates per LOG_RETENTION_DAYS diff --git a/config-template.ini b/config-template.ini index 72aad8c..849a4cd 100644 --- a/config-template.ini +++ b/config-template.ini @@ -1,4 +1,4 @@ -[emitter] +[agent] # --- API Heartbeat Settings --- API_ENABLED = true @@ -19,7 +19,7 @@ MISSING_HOURS = 24 # Logging ENABLE_LOGGING = True -LOG_FILE = C:\SeismoEmitter\emitter_logs\series3_emitter.log +LOG_FILE = C:\SeismoEmitter\agent_logs\series3_agent.log LOG_RETENTION_DAYS = 30 # Console colors - (Doesn't work on windows 7) diff --git a/series3_emitter.py b/series3_agent.py similarity index 96% rename from series3_emitter.py rename to series3_agent.py index e8218e2..080544e 100644 --- a/series3_emitter.py +++ b/series3_agent.py @@ -1,5 +1,5 @@ """ -Series 3 Emitter β€” v1.2.0 +Series 3 Ingest Agent β€” v1.2.1 Environment: - Python 3.8 (Windows 7 compatible) @@ -31,16 +31,16 @@ from socket import 
gethostname # ---------------- Config ---------------- def load_config(path: str) -> Dict[str, Any]: - """Load INI with tolerant inline comments and a required [emitter] section.""" + """Load INI with tolerant inline comments and a required [agent] section.""" cp = configparser.ConfigParser(inline_comment_prefixes=(";", "#")) cp.optionxform = str # preserve key case with open(path, "r", encoding="utf-8") as f: txt = f.read() # Ensure we have a section header if not re.search(r"^\s*\[", txt, flags=re.M): - txt = "[emitter]\n" + txt + txt = "[agent]\n" + txt cp.read_string(txt) - sec = cp["emitter"] + sec = cp["agent"] def get_str(k: str, dflt: str) -> str: return sec.get(k, dflt).strip() @@ -63,7 +63,7 @@ def load_config(path: str) -> Dict[str, Any]: "OK_HOURS": float(get_int("OK_HOURS", 12)), "MISSING_HOURS": float(get_int("MISSING_HOURS", 24)), "ENABLE_LOGGING": get_bool("ENABLE_LOGGING", True), - "LOG_FILE": get_str("LOG_FILE", r"C:\SeismoEmitter\emitter_logs\series3_emitter.log"), + "LOG_FILE": get_str("LOG_FILE", r"C:\SeismoEmitter\agent_logs\series3_agent.log"), "LOG_RETENTION_DAYS": get_int("LOG_RETENTION_DAYS", 30), "COLORIZE": get_bool("COLORIZE", False), # Win7 default off "MLG_HEADER_BYTES": max(256, min(get_int("MLG_HEADER_BYTES", 2048), 65536)), @@ -75,7 +75,7 @@ def load_config(path: str) -> Dict[str, Any]: "API_URL": get_str("API_URL", ""), "API_INTERVAL_SECONDS": get_int("API_INTERVAL_SECONDS", 300), "SOURCE_ID": get_str("SOURCE_ID", gethostname()), - "SOURCE_TYPE": get_str("SOURCE_TYPE", "series3_emitter"), + "SOURCE_TYPE": get_str("SOURCE_TYPE", "series3_ingest_agent"), } @@ -95,7 +95,7 @@ def log_message(path: str, enabled: bool, msg: str) -> None: with open(path, "a", encoding="utf-8") as f: f.write("{} {}\n".format(datetime.now(timezone.utc).isoformat(), msg)) except Exception: - # Logging must never crash the emitter + # Logging must never crash the agent pass @@ -239,7 +239,7 @@ def build_sfm_payload(units_dict: Dict[str, Dict[str, Any]], cfg: 
Dict[str, Any] payload = { "source_id": cfg.get("SOURCE_ID", gethostname()), - "source_type": cfg.get("SOURCE_TYPE", "series3_emitter"), + "source_type": cfg.get("SOURCE_TYPE", "series3_ingest_agent"), "timestamp": now_iso, "units": [], }