diff --git a/backend/main.py b/backend/main.py index 02c05f3..6442ab3 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,13 +1,22 @@ import os +import logging from fastapi import FastAPI, Request, Depends from fastapi.middleware.cors import CORSMiddleware from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates from fastapi.responses import HTMLResponse, FileResponse, JSONResponse +from fastapi.exceptions import RequestValidationError from sqlalchemy.orm import Session from typing import List, Dict from pydantic import BaseModel +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + from backend.database import engine, Base, get_db from backend.routers import roster, units, photos, roster_edit, dashboard, dashboard_tabs, activity, slmm, slm_ui, slm_dashboard, seismo_dashboard from backend.services.snapshot import emit_status_snapshot @@ -27,6 +36,16 @@ app = FastAPI( version=VERSION ) +# Add validation error handler to log details +@app.exception_handler(RequestValidationError) +async def validation_exception_handler(request: Request, exc: RequestValidationError): + logger.error(f"Validation error on {request.url}: {exc.errors()}") + logger.error(f"Body: {await request.body()}") + return JSONResponse( + status_code=400, + content={"detail": exc.errors()} + ) + # Configure CORS app.add_middleware( CORSMiddleware, @@ -372,6 +391,127 @@ async def unknown_emitters_partial(request: Request): }) +@app.get("/partials/devices-all", response_class=HTMLResponse) +async def devices_all_partial(request: Request): + """Unified partial template for ALL devices with comprehensive filtering support""" + from datetime import datetime + snapshot = emit_status_snapshot() + + units_list = [] + + # Add deployed/active units + for unit_id, unit_data in snapshot["active"].items(): + units_list.append({ + "id": unit_id, + "status": unit_data.get("status", "Unknown"), + "age": unit_data.get("age", "N/A"), + "last_seen": unit_data.get("last", "Never"), + "deployed": True, + "retired": False, + "ignored": False, + "note": unit_data.get("note", ""), + "device_type": unit_data.get("device_type", "seismograph"), + "address": unit_data.get("address", ""), + "coordinates": unit_data.get("coordinates", ""), + "project_id": unit_data.get("project_id", ""), + "last_calibrated": unit_data.get("last_calibrated"), + "next_calibration_due": unit_data.get("next_calibration_due"), + "deployed_with_modem_id": unit_data.get("deployed_with_modem_id"), + "ip_address": unit_data.get("ip_address"), + "phone_number": unit_data.get("phone_number"), + "hardware_model": unit_data.get("hardware_model"), + }) + + # Add benched units + for unit_id, unit_data in snapshot["benched"].items(): + units_list.append({ + "id": unit_id, + "status": unit_data.get("status", "N/A"), + "age": unit_data.get("age", "N/A"), + "last_seen": unit_data.get("last", "Never"), + "deployed": False, + "retired": False, + "ignored": False, + "note": unit_data.get("note", ""), + "device_type": unit_data.get("device_type", "seismograph"), + "address": unit_data.get("address", ""), + "coordinates": unit_data.get("coordinates", ""), + "project_id": unit_data.get("project_id", ""), + "last_calibrated": unit_data.get("last_calibrated"), + "next_calibration_due": unit_data.get("next_calibration_due"), + "deployed_with_modem_id": unit_data.get("deployed_with_modem_id"), + "ip_address": unit_data.get("ip_address"), + 
"phone_number": unit_data.get("phone_number"), + "hardware_model": unit_data.get("hardware_model"), + }) + + # Add retired units + for unit_id, unit_data in snapshot["retired"].items(): + units_list.append({ + "id": unit_id, + "status": "Retired", + "age": "N/A", + "last_seen": "N/A", + "deployed": False, + "retired": True, + "ignored": False, + "note": unit_data.get("note", ""), + "device_type": unit_data.get("device_type", "seismograph"), + "address": unit_data.get("address", ""), + "coordinates": unit_data.get("coordinates", ""), + "project_id": unit_data.get("project_id", ""), + "last_calibrated": unit_data.get("last_calibrated"), + "next_calibration_due": unit_data.get("next_calibration_due"), + "deployed_with_modem_id": unit_data.get("deployed_with_modem_id"), + "ip_address": unit_data.get("ip_address"), + "phone_number": unit_data.get("phone_number"), + "hardware_model": unit_data.get("hardware_model"), + }) + + # Add ignored units + for unit_id, unit_data in snapshot.get("ignored", {}).items(): + units_list.append({ + "id": unit_id, + "status": "Ignored", + "age": "N/A", + "last_seen": "N/A", + "deployed": False, + "retired": False, + "ignored": True, + "note": unit_data.get("note", unit_data.get("reason", "")), + "device_type": unit_data.get("device_type", "unknown"), + "address": "", + "coordinates": "", + "project_id": "", + "last_calibrated": None, + "next_calibration_due": None, + "deployed_with_modem_id": None, + "ip_address": None, + "phone_number": None, + "hardware_model": None, + }) + + # Sort by status category, then by ID + def sort_key(unit): + # Priority: deployed (active) -> benched -> retired -> ignored + if unit["deployed"]: + return (0, unit["id"]) + elif not unit["retired"] and not unit["ignored"]: + return (1, unit["id"]) + elif unit["retired"]: + return (2, unit["id"]) + else: + return (3, unit["id"]) + + units_list.sort(key=sort_key) + + return templates.TemplateResponse("partials/devices_table.html", { + "request": request, + "units": units_list, + "timestamp": datetime.now().strftime("%H:%M:%S") + }) + + @app.get("/health") def health_check(): """Health check endpoint""" diff --git a/backend/routers/roster_edit.py b/backend/routers/roster_edit.py index 17e3c6f..dd0c192 100644 --- a/backend/routers/roster_edit.py +++ b/backend/routers/roster_edit.py @@ -1,13 +1,21 @@ -from fastapi import APIRouter, Depends, HTTPException, Form, UploadFile, File +from fastapi import APIRouter, Depends, HTTPException, Form, UploadFile, File, Request +from fastapi.exceptions import RequestValidationError from sqlalchemy.orm import Session from datetime import datetime, date import csv import io +import logging +import httpx +import os from backend.database import get_db from backend.models import RosterUnit, IgnoredUnit, Emitter, UnitHistory router = APIRouter(prefix="/api/roster", tags=["roster-edit"]) +logger = logging.getLogger(__name__) + +# SLMM backend URL for syncing device configs to cache +SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100") def record_history(db: Session, unit_id: str, change_type: str, field_name: str = None, @@ -37,13 +45,98 @@ def get_or_create_roster_unit(db: Session, unit_id: str): return unit +async def sync_slm_to_slmm_cache( + unit_id: str, + host: str = None, + tcp_port: int = None, + ftp_port: int = None, + ftp_username: str = None, + ftp_password: str = None, + deployed_with_modem_id: str = None, + db: Session = None +) -> dict: + """ + Sync SLM device configuration to SLMM backend cache. 
+ + Terra-View is the source of truth for device configs. This function updates + SLMM's config cache (NL43Config table) so SLMM can look up device connection + info by unit_id without Terra-View passing host:port with every request. + + Args: + unit_id: Unique identifier for the SLM device + host: Direct IP address/hostname OR will be resolved from modem + tcp_port: TCP control port (default: 2255) + ftp_port: FTP port (default: 21) + ftp_username: FTP username (optional) + ftp_password: FTP password (optional) + deployed_with_modem_id: If set, resolve modem IP as host + db: Database session for modem lookup + + Returns: + dict: {"success": bool, "message": str} + """ + # Resolve host from modem if assigned + if deployed_with_modem_id and db: + modem = db.query(RosterUnit).filter_by( + id=deployed_with_modem_id, + device_type="modem" + ).first() + if modem and modem.ip_address: + host = modem.ip_address + logger.info(f"Resolved host from modem {deployed_with_modem_id}: {host}") + + # Validate required fields + if not host: + logger.warning(f"Cannot sync SLM {unit_id} to SLMM: no host/IP address provided") + return {"success": False, "message": "No host IP address available"} + + # Set defaults + tcp_port = tcp_port or 2255 + ftp_port = ftp_port or 21 + + # Build SLMM cache payload + config_payload = { + "host": host, + "tcp_port": tcp_port, + "tcp_enabled": True, + "ftp_enabled": bool(ftp_username and ftp_password), + "web_enabled": False + } + + if ftp_username and ftp_password: + config_payload["ftp_username"] = ftp_username + config_payload["ftp_password"] = ftp_password + + # Call SLMM cache update API + slmm_url = f"{SLMM_BASE_URL}/api/nl43/{unit_id}/config" + + try: + async with httpx.AsyncClient(timeout=10.0) as client: + response = await client.put(slmm_url, json=config_payload) + + if response.status_code in [200, 201]: + logger.info(f"Successfully synced SLM {unit_id} to SLMM cache") + return {"success": True, "message": "Device config cached in SLMM"} + else: + logger.error(f"SLMM cache sync failed for {unit_id}: HTTP {response.status_code}") + return {"success": False, "message": f"SLMM returned status {response.status_code}"} + + except httpx.ConnectError: + logger.error(f"Cannot connect to SLMM service at {SLMM_BASE_URL}") + return {"success": False, "message": "SLMM service unavailable"} + + except Exception as e: + logger.error(f"Error syncing SLM {unit_id} to SLMM: {e}") + return {"success": False, "message": str(e)} + + @router.post("/add") -def add_roster_unit( +async def add_roster_unit( id: str = Form(...), device_type: str = Form("seismograph"), unit_type: str = Form("series3"), - deployed: bool = Form(False), - retired: bool = Form(False), + deployed: str = Form(None), + retired: str = Form(None), note: str = Form(""), project_id: str = Form(None), location: str = Form(None), @@ -68,9 +161,11 @@ def add_roster_unit( slm_measurement_range: str = Form(None), db: Session = Depends(get_db) ): - import logging - logger = logging.getLogger(__name__) - logger.info(f"Adding unit: id={id}, device_type={device_type}, slm_tcp_port={slm_tcp_port}, slm_ftp_port={slm_ftp_port}") + logger.info(f"Adding unit: id={id}, device_type={device_type}, deployed={deployed}, retired={retired}") + + # Convert boolean strings to actual booleans + deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False + retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False # Convert port strings to integers slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and 
slm_tcp_port.strip() else None @@ -98,8 +193,8 @@ def add_roster_unit( id=id, device_type=device_type, unit_type=unit_type, - deployed=deployed, - retired=retired, + deployed=deployed_bool, + retired=retired_bool, note=note, project_id=project_id, location=location, @@ -126,6 +221,24 @@ def add_roster_unit( ) db.add(unit) db.commit() + + # If sound level meter, sync config to SLMM cache + if device_type == "sound_level_meter": + logger.info(f"Syncing SLM {id} config to SLMM cache...") + result = await sync_slm_to_slmm_cache( + unit_id=id, + host=slm_host, + tcp_port=slm_tcp_port_int, + ftp_port=slm_ftp_port_int, + deployed_with_modem_id=deployed_with_modem_id, + db=db + ) + + if not result["success"]: + logger.warning(f"SLMM cache sync warning for {id}: {result['message']}") + # Don't fail the operation - device is still added to Terra-View roster + # User can manually sync later or SLMM will be synced on next config update + return {"message": "Unit added", "id": id, "device_type": device_type} @@ -186,8 +299,8 @@ def edit_roster_unit( unit_id: str, device_type: str = Form("seismograph"), unit_type: str = Form("series3"), - deployed: bool = Form(False), - retired: bool = Form(False), + deployed: str = Form(None), + retired: str = Form(None), note: str = Form(""), project_id: str = Form(None), location: str = Form(None), @@ -216,6 +329,10 @@ def edit_roster_unit( if not unit: raise HTTPException(status_code=404, detail="Unit not found") + # Convert boolean strings to actual booleans + deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False + retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False + # Convert port strings to integers slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None @@ -243,8 +360,8 @@ def edit_roster_unit( # Update all fields unit.device_type = device_type unit.unit_type = unit_type - unit.deployed = deployed - unit.retired = retired + unit.deployed = deployed_bool + unit.retired = retired_bool unit.note = note unit.project_id = project_id unit.location = location diff --git a/backend/routers/slm_dashboard.py b/backend/routers/slm_dashboard.py index 857669e..3d9c0df 100644 --- a/backend/routers/slm_dashboard.py +++ b/backend/routers/slm_dashboard.py @@ -12,15 +12,20 @@ from sqlalchemy import func from datetime import datetime, timedelta import httpx import logging +import os from backend.database import get_db from backend.models import RosterUnit +from backend.routers.roster_edit import sync_slm_to_slmm_cache logger = logging.getLogger(__name__) router = APIRouter(prefix="/api/slm-dashboard", tags=["slm-dashboard"]) templates = Jinja2Templates(directory="templates") +# SLMM backend URL - configurable via environment variable +SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100") + @router.get("/stats", response_class=HTMLResponse) async def get_slm_stats(request: Request, db: Session = Depends(get_db)): @@ -120,7 +125,7 @@ async def get_live_view(request: Request, unit_id: str, db: Session = Depends(ge async with httpx.AsyncClient(timeout=5.0) as client: # Get measurement state state_response = await client.get( - f"http://localhost:8100/api/nl43/{unit_id}/measurement-state" + f"{SLMM_BASE_URL}/api/nl43/{unit_id}/measurement-state" ) if state_response.status_code == 200: state_data = state_response.json() @@ -129,7 +134,7 @@ async def get_live_view(request: Request, unit_id: str, db: 
Session = Depends(ge # Get live status status_response = await client.get( - f"http://localhost:8100/api/nl43/{unit_id}/live" + f"{SLMM_BASE_URL}/api/nl43/{unit_id}/live" ) if status_response.status_code == 200: status_data = status_response.json() @@ -162,7 +167,7 @@ async def control_slm(unit_id: str, action: str): try: async with httpx.AsyncClient(timeout=10.0) as client: response = await client.post( - f"http://localhost:8100/api/nl43/{unit_id}/{action}" + f"{SLMM_BASE_URL}/api/nl43/{unit_id}/{action}" ) if response.status_code == 200: @@ -239,6 +244,21 @@ async def save_slm_config(request: Request, unit_id: str, db: Session = Depends( db.commit() logger.info(f"Updated configuration for SLM {unit_id}") + # Sync updated configuration to SLMM cache + logger.info(f"Syncing SLM {unit_id} config changes to SLMM cache...") + result = await sync_slm_to_slmm_cache( + unit_id=unit_id, + host=unit.slm_host, # Use the updated host from Terra-View + tcp_port=unit.slm_tcp_port, + ftp_port=unit.slm_ftp_port, + deployed_with_modem_id=unit.deployed_with_modem_id, # Resolve modem IP if assigned + db=db + ) + + if not result["success"]: + logger.warning(f"SLMM cache sync warning for {unit_id}: {result['message']}") + # Config still saved in Terra-View (source of truth) + return {"status": "success", "unit_id": unit_id} except Exception as e: diff --git a/backend/routers/slmm.py b/backend/routers/slmm.py index b075637..1c73f5e 100644 --- a/backend/routers/slmm.py +++ b/backend/routers/slmm.py @@ -5,9 +5,11 @@ Proxies requests from SFM to the standalone SLMM backend service. SLMM runs on port 8100 and handles NL43/NL53 sound level meter communication. """ -from fastapi import APIRouter, HTTPException, Request, Response +from fastapi import APIRouter, HTTPException, Request, Response, WebSocket, WebSocketDisconnect from fastapi.responses import StreamingResponse import httpx +import websockets +import asyncio import logging import os @@ -17,6 +19,8 @@ router = APIRouter(prefix="/api/slmm", tags=["slmm"]) # SLMM backend URL - configurable via environment variable SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100") +# WebSocket URL derived from HTTP URL +SLMM_WS_BASE_URL = SLMM_BASE_URL.replace("http://", "ws://").replace("https://", "wss://") @router.get("/health") @@ -61,6 +65,173 @@ async def check_slmm_health(): } +# WebSocket routes MUST come before the catch-all route +@router.websocket("/{unit_id}/stream") +async def proxy_websocket_stream(websocket: WebSocket, unit_id: str): + """ + Proxy WebSocket connections to SLMM's /stream endpoint. + + This allows real-time streaming of measurement data from NL43 devices + through the SFM unified interface. 
+ """ + await websocket.accept() + logger.info(f"WebSocket connection accepted for SLMM unit {unit_id}") + + # Build target WebSocket URL + target_ws_url = f"{SLMM_WS_BASE_URL}/api/nl43/{unit_id}/stream" + logger.info(f"Connecting to SLMM WebSocket: {target_ws_url}") + + backend_ws = None + + try: + # Connect to SLMM backend WebSocket + backend_ws = await websockets.connect(target_ws_url) + logger.info(f"Connected to SLMM backend WebSocket for {unit_id}") + + # Create tasks for bidirectional communication + async def forward_to_backend(): + """Forward messages from client to SLMM backend""" + try: + while True: + data = await websocket.receive_text() + await backend_ws.send(data) + except WebSocketDisconnect: + logger.info(f"Client WebSocket disconnected for {unit_id}") + except Exception as e: + logger.error(f"Error forwarding to backend: {e}") + + async def forward_to_client(): + """Forward messages from SLMM backend to client""" + try: + async for message in backend_ws: + await websocket.send_text(message) + except websockets.exceptions.ConnectionClosed: + logger.info(f"Backend WebSocket closed for {unit_id}") + except Exception as e: + logger.error(f"Error forwarding to client: {e}") + + # Run both forwarding tasks concurrently + await asyncio.gather( + forward_to_backend(), + forward_to_client(), + return_exceptions=True + ) + + except websockets.exceptions.WebSocketException as e: + logger.error(f"WebSocket error connecting to SLMM backend: {e}") + try: + await websocket.send_json({ + "error": "Failed to connect to SLMM backend", + "detail": str(e) + }) + except Exception: + pass + except Exception as e: + logger.error(f"Unexpected error in WebSocket proxy for {unit_id}: {e}") + try: + await websocket.send_json({ + "error": "Internal server error", + "detail": str(e) + }) + except Exception: + pass + finally: + # Clean up connections + if backend_ws: + try: + await backend_ws.close() + except Exception: + pass + try: + await websocket.close() + except Exception: + pass + logger.info(f"WebSocket proxy closed for {unit_id}") + + +@router.websocket("/{unit_id}/live") +async def proxy_websocket_live(websocket: WebSocket, unit_id: str): + """ + Proxy WebSocket connections to SLMM's /live endpoint. + + Alternative WebSocket endpoint that may be used by some frontend components. 
+ """ + await websocket.accept() + logger.info(f"WebSocket connection accepted for SLMM unit {unit_id} (live endpoint)") + + # Build target WebSocket URL - try /stream endpoint as SLMM uses that for WebSocket + target_ws_url = f"{SLMM_WS_BASE_URL}/api/nl43/{unit_id}/stream" + logger.info(f"Connecting to SLMM WebSocket: {target_ws_url}") + + backend_ws = None + + try: + # Connect to SLMM backend WebSocket + backend_ws = await websockets.connect(target_ws_url) + logger.info(f"Connected to SLMM backend WebSocket for {unit_id} (live endpoint)") + + # Create tasks for bidirectional communication + async def forward_to_backend(): + """Forward messages from client to SLMM backend""" + try: + while True: + data = await websocket.receive_text() + await backend_ws.send(data) + except WebSocketDisconnect: + logger.info(f"Client WebSocket disconnected for {unit_id} (live)") + except Exception as e: + logger.error(f"Error forwarding to backend (live): {e}") + + async def forward_to_client(): + """Forward messages from SLMM backend to client""" + try: + async for message in backend_ws: + await websocket.send_text(message) + except websockets.exceptions.ConnectionClosed: + logger.info(f"Backend WebSocket closed for {unit_id} (live)") + except Exception as e: + logger.error(f"Error forwarding to client (live): {e}") + + # Run both forwarding tasks concurrently + await asyncio.gather( + forward_to_backend(), + forward_to_client(), + return_exceptions=True + ) + + except websockets.exceptions.WebSocketException as e: + logger.error(f"WebSocket error connecting to SLMM backend (live): {e}") + try: + await websocket.send_json({ + "error": "Failed to connect to SLMM backend", + "detail": str(e) + }) + except Exception: + pass + except Exception as e: + logger.error(f"Unexpected error in WebSocket proxy for {unit_id} (live): {e}") + try: + await websocket.send_json({ + "error": "Internal server error", + "detail": str(e) + }) + except Exception: + pass + finally: + # Clean up connections + if backend_ws: + try: + await backend_ws.close() + except Exception: + pass + try: + await websocket.close() + except Exception: + pass + logger.info(f"WebSocket proxy closed for {unit_id} (live)") + + +# HTTP catch-all route MUST come after specific routes (including WebSocket routes) @router.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE", "PATCH"]) async def proxy_to_slmm(path: str, request: Request): """ diff --git a/docker-compose.yml b/docker-compose.yml index cb16f59..ddb9e1d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,9 +1,9 @@ services: - # --- PRODUCTION --- - seismo-backend: + # --- TERRA-VIEW PRODUCTION --- + terra-view-prod: build: . - container_name: seismo-fleet-manager + container_name: terra-view ports: - "8001:8001" volumes: @@ -11,8 +11,10 @@ services: environment: - PYTHONUNBUFFERED=1 - ENVIRONMENT=production - - SLMM_BASE_URL=http://172.19.0.1:8100 + - SLMM_BASE_URL=http://slmm:8100 restart: unless-stopped + depends_on: + - slmm healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8001/health"] interval: 30s @@ -20,10 +22,10 @@ services: retries: 3 start_period: 40s - # --- DEVELOPMENT --- - sfm-dev: + # --- TERRA-VIEW DEVELOPMENT --- + terra-view-dev: build: . 
- container_name: sfm-dev + container_name: terra-view-dev ports: - "1001:8001" volumes: @@ -31,7 +33,10 @@ services: environment: - PYTHONUNBUFFERED=1 - ENVIRONMENT=development + - SLMM_BASE_URL=http://slmm:8100 restart: unless-stopped + depends_on: + - slmm healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8001/health"] interval: 30s @@ -39,6 +44,28 @@ services: retries: 3 start_period: 40s + # --- SLMM (Sound Level Meter Manager) --- + slmm: + build: + context: ../../slmm + dockerfile: Dockerfile + container_name: slmm + ports: + - "8100:8100" + volumes: + - ../../slmm/data:/app/data + environment: + - PYTHONUNBUFFERED=1 + - PORT=8100 + - CORS_ORIGINS=* + restart: unless-stopped + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8100/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + volumes: data: data-dev: diff --git a/sync_slms_to_slmm.py b/sync_slms_to_slmm.py new file mode 100755 index 0000000..9fe3451 --- /dev/null +++ b/sync_slms_to_slmm.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +""" +One-time script to sync existing SLM devices from Terra-View roster to SLMM cache. +Run this after implementing the automatic sync to backfill existing devices. +""" +import asyncio +import sys +import os + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +from backend.database import SessionLocal +from backend.models import RosterUnit +from backend.routers.roster_edit import sync_slm_to_slmm_cache +import logging + +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + + +async def sync_all_slms(): + """Sync all SLM devices from Terra-View roster to SLMM cache.""" + db = SessionLocal() + try: + # Get all SLM devices from Terra-View (source of truth) + slm_devices = db.query(RosterUnit).filter_by( + device_type="sound_level_meter" + ).all() + + logger.info(f"Found {len(slm_devices)} SLM devices in Terra-View roster") + + success_count = 0 + failed_count = 0 + + for device in slm_devices: + logger.info(f"\nProcessing: {device.id}") + logger.info(f" Host: {device.slm_host}") + logger.info(f" TCP Port: {device.slm_tcp_port}") + logger.info(f" Modem: {device.deployed_with_modem_id}") + + result = await sync_slm_to_slmm_cache( + unit_id=device.id, + host=device.slm_host, + tcp_port=device.slm_tcp_port, + ftp_port=device.slm_ftp_port, + deployed_with_modem_id=device.deployed_with_modem_id, + db=db + ) + + if result["success"]: + logger.info(f"✓ {device.id}: {result['message']}") + success_count += 1 + else: + logger.error(f"✗ {device.id}: {result['message']}") + failed_count += 1 + + logger.info(f"\n{'='*60}") + logger.info(f"Cache sync complete: {success_count} succeeded, {failed_count} failed") + logger.info(f"{'='*60}") + + finally: + db.close() + + +if __name__ == "__main__": + asyncio.run(sync_all_slms()) diff --git a/templates/base.html b/templates/base.html index 444fdee..645646d 100644 --- a/templates/base.html +++ b/templates/base.html @@ -110,7 +110,7 @@ - Fleet Roster + Devices @@ -194,7 +194,7 @@ - Roster + Devices + + + +
[templates/base.html: the remaining added navigation markup is not recoverable from this extract; the visible change renames the "Fleet Roster" and "Roster" links to "Devices".]
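For reference, the cache write performed by sync_slm_to_slmm_cache (and by the sync_slms_to_slmm.py backfill script above) reduces to a single PUT against SLMM's config endpoint. A minimal sketch, assuming the default SLMM_BASE_URL of http://localhost:8100 and a hypothetical unit ID and host:

import asyncio
import httpx

async def cache_slm_config(unit_id: str, host: str, tcp_port: int = 2255) -> bool:
    # Payload mirrors the fields built in sync_slm_to_slmm_cache.
    payload = {
        "host": host,
        "tcp_port": tcp_port,
        "tcp_enabled": True,
        "ftp_enabled": False,  # True only when FTP credentials are supplied
        "web_enabled": False,
    }
    async with httpx.AsyncClient(timeout=10.0) as client:
        resp = await client.put(f"http://localhost:8100/api/nl43/{unit_id}/config", json=payload)
    return resp.status_code in (200, 201)

if __name__ == "__main__":
    print(asyncio.run(cache_slm_config("NL43-0001", "192.0.2.10")))  # hypothetical values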
diff --git a/templates/partials/slm_live_view.html b/templates/partials/slm_live_view.html
index d10264d..c5ee656 100644
--- a/templates/partials/slm_live_view.html
+++ b/templates/partials/slm_live_view.html
@@ -140,32 +140,73 @@
[Markup not recoverable from this extract. The removed block was the inline status row (Battery, Power, Frequency/Time Weighting, SD Remaining, each rendered from current_status or unit fields with a "--" fallback). The added block is a "Device Status" card grid: Battery (value plus a level bar updated by the refresh script below), Power source, SD Card (remaining MB and percent free), and Last Update (initially "Just now", with an "Auto-refresh: 30s" note).]
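These cards render from current_status on first load and are then refreshed by the script added in the next hunk, which polls /api/slmm/{unit_id}/live every 30 seconds and reads battery_level, power_source, sd_remaining_mb and sd_free_ratio out of result.data. A minimal Python equivalent of that poll, assuming Terra-View/SFM on its default port 8001 and a hypothetical unit ID:

import httpx

def poll_live_status(unit_id: str) -> dict:
    # Goes through SFM's /api/slmm catch-all proxy to the SLMM backend.
    resp = httpx.get(f"http://localhost:8001/api/slmm/{unit_id}/live", timeout=5.0)
    resp.raise_for_status()
    result = resp.json()
    if result.get("status") == "ok" and result.get("data"):
        return result["data"]  # battery_level, power_source, sd_remaining_mb, sd_free_ratio, ...
    return {}

print(poll_live_status("NL43-0001"))  # hypothetical unit ID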
@@ -429,6 +470,94 @@ async function controlUnit(unitId, action) { } } + +// Auto-refresh status every 30 seconds +let refreshInterval; +const REFRESH_INTERVAL_MS = 30000; // 30 seconds +const unit_id = '{{ unit.id }}'; + +function updateDeviceStatus() { + fetch(`/api/slmm/${unit_id}/live`) + .then(response => response.json()) + .then(result => { + if (result.status === 'ok' && result.data) { + const data = result.data; + + // Update battery + if (document.getElementById('battery-level')) { + const batteryLevel = data.battery_level || '--'; + document.getElementById('battery-level').textContent = batteryLevel === '--' ? '--' : `${batteryLevel}%`; + + // Update battery bar + const batteryBar = document.getElementById('battery-bar'); + if (batteryBar && batteryLevel !== '--') { + const level = parseInt(batteryLevel); + batteryBar.style.width = `${level}%`; + + // Color based on level + if (level > 50) { + batteryBar.className = 'bg-green-500 h-2 rounded-full transition-all'; + } else if (level > 20) { + batteryBar.className = 'bg-yellow-500 h-2 rounded-full transition-all'; + } else { + batteryBar.className = 'bg-red-500 h-2 rounded-full transition-all'; + } + } + } + + // Update power source + if (document.getElementById('power-source')) { + document.getElementById('power-source').textContent = data.power_source || '--'; + } + + // Update SD card info + if (document.getElementById('sd-remaining')) { + const sdRemaining = data.sd_remaining_mb || '--'; + document.getElementById('sd-remaining').textContent = sdRemaining === '--' ? '--' : `${sdRemaining} MB`; + } + if (document.getElementById('sd-ratio')) { + const sdRatio = data.sd_free_ratio || '--'; + document.getElementById('sd-ratio').textContent = sdRatio === '--' ? '--' : `${sdRatio}% free`; + } + + // Update last update timestamp + if (document.getElementById('last-update')) { + const now = new Date(); + document.getElementById('last-update').textContent = now.toLocaleTimeString(); + } + } + }) + .catch(error => { + console.error('Failed to refresh device status:', error); + // Update last update with error indicator + if (document.getElementById('last-update')) { + document.getElementById('last-update').textContent = 'Update failed'; + } + }); +} + +// Start auto-refresh +function startAutoRefresh() { + // Initial update + updateDeviceStatus(); + + // Set up interval + refreshInterval = setInterval(updateDeviceStatus, REFRESH_INTERVAL_MS); + console.log('Auto-refresh started (30s interval)'); +} + +// Stop auto-refresh +function stopAutoRefresh() { + if (refreshInterval) { + clearInterval(refreshInterval); + refreshInterval = null; + console.log('Auto-refresh stopped'); + } +} + +// Start auto-refresh when page loads +document.addEventListener('DOMContentLoaded', startAutoRefresh); + // Cleanup on page unload window.addEventListener('beforeunload', function() { if (window.currentWebSocket) { diff --git a/templates/partials/slm_live_view.html.backup b/templates/partials/slm_live_view.html.backup new file mode 100644 index 0000000..d10264d --- /dev/null +++ b/templates/partials/slm_live_view.html.backup @@ -0,0 +1,438 @@ + +
[New file templates/partials/slm_live_view.html.backup: a verbatim copy of the previous slm_live_view.html (blob d10264d), containing the header with unit ID, model and serial number, the "via Modem" / "Direct" / "No modem assigned" line, the Measuring/Stopped badge, the Lp/Leq/Lmax/Lmin/Lpeak dB readouts, and the old Battery/Power/Weighting/SD Remaining status row. Markup not recoverable from this extract.]
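The WebSocket proxy added in backend/routers/slmm.py above can be exercised with a small client. A sketch using the websockets package, assuming SFM on localhost:8001 and a hypothetical unit ID; messages arrive verbatim as relayed from SLMM's /api/nl43/{unit_id}/stream endpoint:

import asyncio
import websockets

async def tail_stream(unit_id: str, count: int = 5) -> None:
    url = f"ws://localhost:8001/api/slmm/{unit_id}/stream"
    async with websockets.connect(url) as ws:
        for _ in range(count):
            print(await ws.recv())  # raw measurement messages relayed by the proxy

asyncio.run(tail_stream("NL43-0001"))  # hypothetical unit ID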
diff --git a/templates/roster.html b/templates/roster.html
index dd5aae6..765e1e9 100644
--- a/templates/roster.html
+++ b/templates/roster.html
@@ -1,20 +1,20 @@
 {% extends "base.html" %}
-{% block title %}Fleet Roster - Seismo Fleet Manager{% endblock %}
+{% block title %}Devices - Seismo Fleet Manager{% endblock %}
 {% block content %}
[Markup not recoverable from this extract. Recoverable changes: the page heading "Fleet Roster" with subtitle "Real-time status of all seismograph units" becomes "Devices" with "Manage all devices in your fleet"; the search box now calls filterDevices() on keyup; new filter button groups are added for Type, Status, and Health, plus a "Showing 0 of 0 devices" count line; several removed header rows are not recoverable; the loading text changes from "Loading roster data..." to "Loading devices..."; and at @@ -114,9 +112,9 @@ the unit-ID field placeholder becomes "BE1234 or MODEM-001 (no spaces)".]
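Client-side, the add-unit handler in the next hunk now surfaces FastAPI validation details (loc/msg pairs) instead of a generic alert; server-side, roster_edit.py coerces the deployed/retired form values from strings. A one-line helper capturing that coercion, as a sketch (the router inlines the same expression):

def form_bool(value: str | None) -> bool:
    # Mirrors the deployed/retired handling in roster_edit.py: missing or unset values are False.
    return value in {"true", "True", "1", "yes"} if value else False

assert form_bool("true") and form_bool("1")
assert not form_bool(None) and not form_bool("false")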
@@ -550,7 +548,29 @@ // Show success message alert('Unit added successfully!'); } else { - alert('Error adding unit. Please check the form and try again.'); + // Log detailed error information + console.error('Error adding unit:', { + status: event.detail.xhr.status, + response: event.detail.xhr.responseText, + headers: event.detail.xhr.getAllResponseHeaders() + }); + + // Try to parse error message from response + let errorMsg = 'Error adding unit. Please check the form and try again.'; + try { + const response = JSON.parse(event.detail.xhr.responseText); + if (response.detail) { + if (typeof response.detail === 'string') { + errorMsg = response.detail; + } else if (Array.isArray(response.detail)) { + errorMsg = response.detail.map(err => `${err.loc?.join('.')}: ${err.msg}`).join('\n'); + } + } + } catch (e) { + console.error('Could not parse error response:', e); + } + + alert(errorMsg); } }); @@ -904,33 +924,203 @@ } } - // Filter roster table based on search input - function filterRosterTable() { - const searchInput = document.getElementById('roster-search').value.toLowerCase(); - const table = document.querySelector('#roster-content table tbody'); + // ===== DEVICE FILTERING SYSTEM ===== - if (!table) return; + // Current active filters + let activeFilters = { + deviceType: 'all', + status: 'all', + health: 'all', + search: '' + }; - const rows = table.getElementsByTagName('tr'); + // Initialize filter button click handlers + document.addEventListener('DOMContentLoaded', function() { + // Device type filter buttons + document.querySelectorAll('.filter-device-type').forEach(btn => { + btn.addEventListener('click', function() { + // Update active state + document.querySelectorAll('.filter-device-type').forEach(b => b.classList.remove('active-filter')); + this.classList.add('active-filter'); - for (let row of rows) { - const cells = row.getElementsByTagName('td'); - if (cells.length === 0) continue; // Skip header or empty rows + // Update filter value + activeFilters.deviceType = this.dataset.value; - const unitId = cells[1]?.textContent?.toLowerCase() || ''; - const unitType = cells[2]?.textContent?.toLowerCase() || ''; - const note = cells[6]?.textContent?.toLowerCase() || ''; + // Apply filters + filterDevices(); + }); + }); - const matches = unitId.includes(searchInput) || - unitType.includes(searchInput) || - note.includes(searchInput); + // Status filter buttons + document.querySelectorAll('.filter-status').forEach(btn => { + btn.addEventListener('click', function() { + // Update active state + document.querySelectorAll('.filter-status').forEach(b => b.classList.remove('active-filter')); + this.classList.add('active-filter'); - row.style.display = matches ? 
'' : 'none'; + // Update filter value + activeFilters.status = this.dataset.value; + + // Toggle health filter visibility (hide for retired/ignored) + const healthGroup = document.getElementById('health-filter-group'); + if (this.dataset.value === 'retired' || this.dataset.value === 'ignored') { + healthGroup.style.display = 'none'; + } else { + healthGroup.style.display = 'flex'; + } + + // Apply filters + filterDevices(); + }); + }); + + // Health status filter buttons + document.querySelectorAll('.filter-health').forEach(btn => { + btn.addEventListener('click', function() { + // Update active state + document.querySelectorAll('.filter-health').forEach(b => b.classList.remove('active-filter')); + this.classList.add('active-filter'); + + // Update filter value + activeFilters.health = this.dataset.value; + + // Apply filters + filterDevices(); + }); + }); + }); + + // Main filter function - filters devices based on all active criteria + function filterDevices() { + const searchInput = document.getElementById('device-search')?.value.toLowerCase() || ''; + activeFilters.search = searchInput; + + const table = document.querySelector('#device-content table tbody'); + const cards = document.querySelectorAll('#device-content .device-card'); // For mobile view + + let visibleCount = 0; + let totalCount = 0; + + // Filter table rows (desktop view) + if (table) { + const rows = table.getElementsByTagName('tr'); + totalCount = rows.length; + + for (let row of rows) { + const cells = row.getElementsByTagName('td'); + if (cells.length === 0) continue; + + // Extract row data (adjust indices based on your table structure) + const status = cells[0]?.querySelector('.status-badge')?.textContent?.toLowerCase() || ''; + const deviceId = cells[1]?.textContent?.toLowerCase() || ''; + const deviceType = cells[2]?.textContent?.toLowerCase() || ''; + const note = cells[6]?.textContent?.toLowerCase() || ''; + + // Get data attributes for filtering + const rowDeviceType = row.dataset.deviceType || ''; + const rowStatus = row.dataset.status || ''; + const rowHealth = row.dataset.health || ''; + + // Apply filters + const matchesSearch = !searchInput || + deviceId.includes(searchInput) || + deviceType.includes(searchInput) || + note.includes(searchInput); + + const matchesDeviceType = activeFilters.deviceType === 'all' || + rowDeviceType === activeFilters.deviceType; + + const matchesStatus = activeFilters.status === 'all' || + rowStatus === activeFilters.status; + + const matchesHealth = activeFilters.health === 'all' || + rowHealth === activeFilters.health || + activeFilters.status === 'retired' || + activeFilters.status === 'ignored'; + + const isVisible = matchesSearch && matchesDeviceType && matchesStatus && matchesHealth; + + row.style.display = isVisible ? 
'' : 'none'; + if (isVisible) visibleCount++; + } } + + // Filter cards (mobile view) + if (cards.length > 0) { + totalCount = cards.length; + visibleCount = 0; + + cards.forEach(card => { + const cardDeviceType = card.dataset.deviceType || ''; + const cardStatus = card.dataset.status || ''; + const cardHealth = card.dataset.health || ''; + const cardText = card.textContent.toLowerCase(); + + const matchesSearch = !searchInput || cardText.includes(searchInput); + const matchesDeviceType = activeFilters.deviceType === 'all' || cardDeviceType === activeFilters.deviceType; + const matchesStatus = activeFilters.status === 'all' || cardStatus === activeFilters.status; + const matchesHealth = activeFilters.health === 'all' || cardHealth === activeFilters.health; + + const isVisible = matchesSearch && matchesDeviceType && matchesStatus && matchesHealth; + + card.style.display = isVisible ? '' : 'none'; + if (isVisible) visibleCount++; + }); + } + + // Update count display + document.getElementById('visible-count').textContent = visibleCount; + document.getElementById('total-count').textContent = totalCount; + } + + // Legacy function name for compatibility + function filterRosterTable() { + filterDevices(); }
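For context on the ordering these filters operate over: /partials/devices-all in backend/main.py sorts units deployed, then benched, then retired, then ignored, alphabetically by ID within each group. A runnable sketch of that grouping, using hypothetical unit IDs:

def sort_key(unit: dict) -> tuple:
    # Same grouping as devices_all_partial in backend/main.py.
    if unit["deployed"]:
        return (0, unit["id"])
    if not unit["retired"] and not unit["ignored"]:
        return (1, unit["id"])
    if unit["retired"]:
        return (2, unit["id"])
    return (3, unit["id"])

units = [
    {"id": "BE200", "deployed": False, "retired": True, "ignored": False},
    {"id": "BE100", "deployed": True, "retired": False, "ignored": False},
    {"id": "BE150", "deployed": False, "retired": False, "ignored": False},
]
print([u["id"] for u in sorted(units, key=sort_key)])  # ['BE100', 'BE150', 'BE200']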