- seismograph list page enhanced with better visibility, filtering, sorting, and color-coded calibration dates.
1364 lines
52 KiB
Python
1364 lines
52 KiB
Python
from fastapi import APIRouter, Depends, HTTPException, Form, UploadFile, File, Request, Query
|
|
from fastapi.exceptions import RequestValidationError
|
|
from sqlalchemy.orm import Session
|
|
from datetime import datetime, date, timedelta
|
|
import csv
|
|
import io
|
|
import logging
|
|
import httpx
|
|
import os
|
|
|
|
from backend.database import get_db
|
|
from backend.models import RosterUnit, IgnoredUnit, Emitter, UnitHistory, UserPreferences
|
|
from backend.services.slmm_sync import sync_slm_to_slmm
|
|
|
|
router = APIRouter(prefix="/api/roster", tags=["roster-edit"])
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def get_calibration_interval(db: Session) -> int:
    """Return the calibration interval in days from user preferences.

    Falls back to 365 days when no preferences row exists or when the
    interval field is unset/zero.
    """
    preferences = db.query(UserPreferences).first()
    if preferences is None:
        return 365
    return preferences.calibration_interval_days or 365
|
|
|
|
# SLMM backend URL for syncing device configs to cache
|
|
SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100")
|
|
|
|
|
|
def record_history(db: Session, unit_id: str, change_type: str, field_name: str = None,
                   old_value: str = None, new_value: str = None, source: str = "manual", notes: str = None):
    """Append a UnitHistory row describing one change to a unit.

    The entry is only added to the session; the caller is responsible
    for committing the transaction.
    """
    db.add(UnitHistory(
        unit_id=unit_id,
        change_type=change_type,
        field_name=field_name,
        old_value=old_value,
        new_value=new_value,
        changed_at=datetime.utcnow(),
        source=source,
        notes=notes,
    ))
|
|
|
|
|
|
def get_or_create_roster_unit(db: Session, unit_id: str):
    """Fetch a RosterUnit by id, creating (and committing) a bare row if absent."""
    existing = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if existing is not None:
        return existing

    # Not on the roster yet -- persist a minimal row so callers can mutate it.
    created = RosterUnit(id=unit_id)
    db.add(created)
    db.commit()
    db.refresh(created)
    return created
|
|
|
|
|
|
async def sync_slm_to_slmm_cache(
    unit_id: str,
    host: str = None,
    tcp_port: int = None,
    ftp_port: int = None,
    ftp_username: str = None,
    ftp_password: str = None,
    deployed_with_modem_id: str = None,
    db: Session = None
) -> dict:
    """
    Push an SLM device's connection config into the SLMM backend cache.

    Terra-View is the source of truth for device configs. This updates
    SLMM's config cache (NL43Config table) so SLMM can look up device
    connection info by unit_id without Terra-View passing host:port on
    every request.

    Args:
        unit_id: Unique identifier for the SLM device.
        host: Direct IP address/hostname; overridden by the paired modem's
            IP when deployed_with_modem_id resolves to a modem that has one.
        tcp_port: TCP control port (defaults to 2255 when falsy).
        ftp_port: FTP port (accepted for API symmetry; NOTE(review): it is
            not currently included in the payload sent to SLMM -- confirm
            whether SLMM derives it elsewhere).
        ftp_username: FTP username (optional).
        ftp_password: FTP password (optional).
        deployed_with_modem_id: When set (and db provided), look up the
            modem and use its ip_address as the host.
        db: Database session, used only for the modem lookup.

    Returns:
        dict: {"success": bool, "message": str}
    """
    # A paired modem's IP address takes precedence over any explicit host.
    if deployed_with_modem_id and db:
        linked_modem = db.query(RosterUnit).filter_by(
            id=deployed_with_modem_id,
            device_type="modem"
        ).first()
        if linked_modem and linked_modem.ip_address:
            host = linked_modem.ip_address
            logger.info(f"Resolved host from modem {deployed_with_modem_id}: {host}")

    # Without a reachable address there is nothing SLMM could cache.
    if not host:
        logger.warning(f"Cannot sync SLM {unit_id} to SLMM: no host/IP address provided")
        return {"success": False, "message": "No host IP address available"}

    # FTP is only enabled when a full credential pair is supplied.
    ftp_ready = bool(ftp_username and ftp_password)
    payload = {
        "host": host,
        "tcp_port": tcp_port or 2255,
        "tcp_enabled": True,
        "ftp_enabled": ftp_ready,
        "web_enabled": False
    }
    if ftp_ready:
        payload["ftp_username"] = ftp_username
        payload["ftp_password"] = ftp_password

    url = f"{SLMM_BASE_URL}/api/nl43/{unit_id}/config"

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.put(url, json=payload)

        if response.status_code in [200, 201]:
            logger.info(f"Successfully synced SLM {unit_id} to SLMM cache")
            return {"success": True, "message": "Device config cached in SLMM"}

        logger.error(f"SLMM cache sync failed for {unit_id}: HTTP {response.status_code}")
        return {"success": False, "message": f"SLMM returned status {response.status_code}"}

    except httpx.ConnectError:
        logger.error(f"Cannot connect to SLMM service at {SLMM_BASE_URL}")
        return {"success": False, "message": "SLMM service unavailable"}

    except Exception as e:
        logger.error(f"Error syncing SLM {unit_id} to SLMM: {e}")
        return {"success": False, "message": str(e)}
|
|
|
|
|
|
@router.post("/add")
async def add_roster_unit(
    id: str = Form(...),
    device_type: str = Form("seismograph"),
    unit_type: str = Form("series3"),
    deployed: str = Form(None),
    retired: str = Form(None),
    note: str = Form(""),
    project_id: str = Form(None),
    location: str = Form(None),
    address: str = Form(None),
    coordinates: str = Form(None),
    # Seismograph-specific fields
    last_calibrated: str = Form(None),
    next_calibration_due: str = Form(None),
    deployed_with_modem_id: str = Form(None),
    # Modem-specific fields
    ip_address: str = Form(None),
    phone_number: str = Form(None),
    hardware_model: str = Form(None),
    deployment_type: str = Form(None),  # "seismograph" | "slm" - what device type modem is deployed with
    deployed_with_unit_id: str = Form(None),  # ID of seismograph/SLM this modem is deployed with
    # Sound Level Meter-specific fields
    slm_host: str = Form(None),
    slm_tcp_port: str = Form(None),
    slm_ftp_port: str = Form(None),
    slm_model: str = Form(None),
    slm_serial_number: str = Form(None),
    slm_frequency_weighting: str = Form(None),
    slm_time_weighting: str = Form(None),
    slm_measurement_range: str = Form(None),
    db: Session = Depends(get_db)
):
    """
    Create a new roster unit (seismograph, modem, or sound level meter).

    For seismographs, next_calibration_due is derived from last_calibrated
    plus the configured calibration interval; an explicit value is only
    honored when last_calibrated is absent. SLM units additionally have
    their connection config pushed to the SLMM cache (best-effort).

    Raises:
        HTTPException 400: duplicate id, malformed date, or non-integer port.
    """
    logger.info(f"Adding unit: id={id}, device_type={device_type}, deployed={deployed}, retired={retired}")

    # Checkbox-style form values arrive as strings, not booleans.
    deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False
    retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False

    # Ports arrive as strings. Reject non-numeric input with a 400 instead of
    # letting int() raise ValueError and surface as an unhandled 500.
    try:
        slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None
        slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid SLM port: must be an integer")

    if db.query(RosterUnit).filter(RosterUnit.id == id).first():
        raise HTTPException(status_code=400, detail="Unit already exists")

    # Parse date fields if provided
    last_cal_date = None
    if last_calibrated:
        try:
            last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD")

    # Auto-calculate next_calibration_due from last_calibrated using calibration interval
    next_cal_date = None
    if last_cal_date:
        cal_interval = get_calibration_interval(db)
        next_cal_date = last_cal_date + timedelta(days=cal_interval)
    elif next_calibration_due:
        # Fallback: allow explicit setting if no last_calibrated
        try:
            next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD")

    unit = RosterUnit(
        id=id,
        device_type=device_type,
        unit_type=unit_type,
        deployed=deployed_bool,
        retired=retired_bool,
        note=note,
        project_id=project_id,
        location=location,
        address=address,
        coordinates=coordinates,
        last_updated=datetime.utcnow(),
        # Seismograph-specific fields
        last_calibrated=last_cal_date,
        next_calibration_due=next_cal_date,
        deployed_with_modem_id=deployed_with_modem_id if deployed_with_modem_id else None,
        # Modem-specific fields (empty strings normalized to NULL)
        ip_address=ip_address if ip_address else None,
        phone_number=phone_number if phone_number else None,
        hardware_model=hardware_model if hardware_model else None,
        deployment_type=deployment_type if deployment_type else None,
        # Sound Level Meter-specific fields
        slm_host=slm_host if slm_host else None,
        slm_tcp_port=slm_tcp_port_int,
        slm_ftp_port=slm_ftp_port_int,
        slm_model=slm_model if slm_model else None,
        slm_serial_number=slm_serial_number if slm_serial_number else None,
        slm_frequency_weighting=slm_frequency_weighting if slm_frequency_weighting else None,
        slm_time_weighting=slm_time_weighting if slm_time_weighting else None,
        slm_measurement_range=slm_measurement_range if slm_measurement_range else None,
    )

    # Auto-fill location data from the paired modem for any fields left empty
    if deployed_with_modem_id:
        modem = db.query(RosterUnit).filter(
            RosterUnit.id == deployed_with_modem_id,
            RosterUnit.device_type == "modem"
        ).first()
        if modem:
            if not unit.location and modem.location:
                unit.location = modem.location
            if not unit.address and modem.address:
                unit.address = modem.address
            if not unit.coordinates and modem.coordinates:
                unit.coordinates = modem.coordinates
            if not unit.project_id and modem.project_id:
                unit.project_id = modem.project_id

    db.add(unit)
    db.commit()

    # If sound level meter, sync config to SLMM cache (best-effort)
    if device_type == "slm":
        logger.info(f"Syncing SLM {id} config to SLMM cache...")
        result = await sync_slm_to_slmm_cache(
            unit_id=id,
            host=slm_host,
            tcp_port=slm_tcp_port_int,
            ftp_port=slm_ftp_port_int,
            deployed_with_modem_id=deployed_with_modem_id,
            db=db
        )

        if not result["success"]:
            logger.warning(f"SLMM cache sync warning for {id}: {result['message']}")
            # Don't fail the operation - device is still added to Terra-View roster
            # User can manually sync later or SLMM will be synced on next config update

    return {"message": "Unit added", "id": id, "device_type": device_type}
|
|
|
|
|
|
@router.get("/modems")
def get_modems_list(db: Session = Depends(get_db)):
    """Return every non-retired modem, ordered by id, for dropdown selection."""
    active_modems = (
        db.query(RosterUnit)
        .filter_by(device_type="modem", retired=False)
        .order_by(RosterUnit.id)
        .all()
    )

    payload = []
    for m in active_modems:
        payload.append({
            "id": m.id,
            "ip_address": m.ip_address,
            "phone_number": m.phone_number,
            "hardware_model": m.hardware_model,
            "deployed": m.deployed,
        })
    return payload
|
|
|
|
|
|
@router.get("/search/modems")
def search_modems(
    request: Request,
    q: str = Query("", description="Search term"),
    deployed_only: bool = Query(False, description="Only show deployed modems"),
    exclude_retired: bool = Query(True, description="Exclude retired modems"),
    limit: int = Query(10, le=50),
    db: Session = Depends(get_db)
):
    """
    Search modems by ID, IP address, or note; render an HTMX dropdown partial.

    Backs the modem picker used when linking a modem to a seismograph/SLM.
    """
    from fastapi.responses import HTMLResponse
    from fastapi.templating import Jinja2Templates

    templates = Jinja2Templates(directory="templates")

    modem_query = db.query(RosterUnit).filter(RosterUnit.device_type == "modem")

    if deployed_only:
        modem_query = modem_query.filter(RosterUnit.deployed == True)
    if exclude_retired:
        modem_query = modem_query.filter(RosterUnit.retired == False)

    # Case-insensitive substring match on id, IP address, or note.
    term = q.strip() if q else ""
    if term:
        pattern = f"%{term}%"
        modem_query = modem_query.filter(
            (RosterUnit.id.ilike(pattern)) |
            (RosterUnit.ip_address.ilike(pattern)) |
            (RosterUnit.note.ilike(pattern))
        )

    matches = modem_query.order_by(RosterUnit.id).limit(limit).all()

    results = []
    for m in matches:
        # Display string: "ID - IP - Note", skipping missing pieces.
        pieces = [m.id]
        if m.ip_address:
            pieces.append(m.ip_address)
        if m.note:
            pieces.append(m.note)

        results.append({
            "id": m.id,
            "ip_address": m.ip_address or "",
            "phone_number": m.phone_number or "",
            "note": m.note or "",
            "deployed": m.deployed,
            "display": " - ".join(pieces),
        })

    # Only show a "no results" message when the user actually typed something.
    show_empty = (not results) and q and q.strip()

    return templates.TemplateResponse(
        "partials/modem_search_results.html",
        {
            "request": request,
            "modems": results,
            "query": q,
            "show_empty": show_empty
        }
    )
|
|
|
|
|
|
@router.get("/search/units")
def search_units(
    request: Request,
    q: str = Query("", description="Search term"),
    device_type: str = Query(None, description="Filter by device type: seismograph, modem, slm"),
    deployed_only: bool = Query(False, description="Only show deployed units"),
    exclude_retired: bool = Query(True, description="Exclude retired units"),
    limit: int = Query(10, le=50),
    db: Session = Depends(get_db)
):
    """
    Search roster units by ID or note; render an HTMX dropdown partial.

    Backs the unit picker used when linking a seismograph/SLM to a modem.
    """
    from fastapi.responses import HTMLResponse
    from fastapi.templating import Jinja2Templates

    templates = Jinja2Templates(directory="templates")

    unit_query = db.query(RosterUnit)

    # Optional narrowing filters.
    if device_type:
        unit_query = unit_query.filter(RosterUnit.device_type == device_type)
    if deployed_only:
        unit_query = unit_query.filter(RosterUnit.deployed == True)
    if exclude_retired:
        unit_query = unit_query.filter(RosterUnit.retired == False)

    # Case-insensitive substring match on id or note.
    term = q.strip() if q else ""
    if term:
        pattern = f"%{term}%"
        unit_query = unit_query.filter(
            (RosterUnit.id.ilike(pattern)) |
            (RosterUnit.note.ilike(pattern))
        )

    matches = unit_query.order_by(RosterUnit.id).limit(limit).all()

    results = [
        {
            "id": u.id,
            "device_type": u.device_type or "seismograph",
            "note": u.note or "",
            "deployed": u.deployed,
            "display": f"{u.id}" + (f" - {u.note}" if u.note else ""),
        }
        for u in matches
    ]

    # Only show a "no results" message when the user actually typed something.
    show_empty = (not results) and q and q.strip()

    return templates.TemplateResponse(
        "partials/unit_search_results.html",
        {
            "request": request,
            "units": results,
            "query": q,
            "show_empty": show_empty
        }
    )
|
|
|
|
|
|
@router.get("/{unit_id}")
def get_roster_unit(unit_id: str, db: Session = Depends(get_db)):
    """Return every field of a single roster unit as a JSON-friendly dict."""
    unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if unit is None:
        raise HTTPException(status_code=404, detail="Unit not found")

    def text(value):
        # Collapse NULL/empty values to "" for form population on the client.
        return value or ""

    return {
        "id": unit.id,
        "device_type": unit.device_type or "seismograph",
        "unit_type": unit.unit_type,
        "deployed": unit.deployed,
        "retired": unit.retired,
        "note": text(unit.note),
        "project_id": text(unit.project_id),
        "location": text(unit.location),
        "address": text(unit.address),
        "coordinates": text(unit.coordinates),
        # Dates serialized as ISO strings; "" when unset.
        "last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else "",
        "next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else "",
        "deployed_with_modem_id": text(unit.deployed_with_modem_id),
        "ip_address": text(unit.ip_address),
        "phone_number": text(unit.phone_number),
        "hardware_model": text(unit.hardware_model),
        "deployment_type": text(unit.deployment_type),
        "deployed_with_unit_id": text(unit.deployed_with_unit_id),
        "slm_host": text(unit.slm_host),
        "slm_tcp_port": text(unit.slm_tcp_port),
        "slm_ftp_port": text(unit.slm_ftp_port),
        "slm_model": text(unit.slm_model),
        "slm_serial_number": text(unit.slm_serial_number),
        "slm_frequency_weighting": text(unit.slm_frequency_weighting),
        "slm_time_weighting": text(unit.slm_time_weighting),
        "slm_measurement_range": text(unit.slm_measurement_range),
    }
|
|
|
|
|
|
@router.post("/edit/{unit_id}")
async def edit_roster_unit(
    unit_id: str,
    device_type: str = Form("seismograph"),
    unit_type: str = Form("series3"),
    deployed: str = Form(None),
    retired: str = Form(None),
    note: str = Form(""),
    project_id: str = Form(None),
    location: str = Form(None),
    address: str = Form(None),
    coordinates: str = Form(None),
    # Seismograph-specific fields
    last_calibrated: str = Form(None),
    next_calibration_due: str = Form(None),
    deployed_with_modem_id: str = Form(None),
    # Modem-specific fields
    ip_address: str = Form(None),
    phone_number: str = Form(None),
    hardware_model: str = Form(None),
    deployment_type: str = Form(None),
    deployed_with_unit_id: str = Form(None),
    # Sound Level Meter-specific fields
    slm_host: str = Form(None),
    slm_tcp_port: str = Form(None),
    slm_ftp_port: str = Form(None),
    slm_model: str = Form(None),
    slm_serial_number: str = Form(None),
    slm_frequency_weighting: str = Form(None),
    slm_time_weighting: str = Form(None),
    slm_measurement_range: str = Form(None),
    # Cascade options - sync fields to paired device
    cascade_to_unit_id: str = Form(None),
    cascade_deployed: str = Form(None),
    cascade_retired: str = Form(None),
    cascade_project: str = Form(None),
    cascade_location: str = Form(None),
    cascade_coordinates: str = Form(None),
    cascade_note: str = Form(None),
    db: Session = Depends(get_db)
):
    """
    Update all fields of an existing roster unit.

    Optionally cascades selected fields (deployed/retired/project/location/
    coordinates/note) to a paired device, recording history entries for
    every change on both units. SLM deploy/retire changes are pushed to
    SLMM so benched units stop being polled (best-effort).

    Raises:
        HTTPException 404: unit not found.
        HTTPException 400: malformed date or non-integer port.
    """
    unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if not unit:
        raise HTTPException(status_code=404, detail="Unit not found")

    # Checkbox-style form values arrive as strings, not booleans.
    deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False
    retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False

    # Ports arrive as strings. Reject non-numeric input with a 400 instead of
    # letting int() raise ValueError and surface as an unhandled 500.
    try:
        slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None
        slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid SLM port: must be an integer")

    # Parse date fields if provided
    last_cal_date = None
    if last_calibrated:
        try:
            last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD")

    # Auto-calculate next_calibration_due from last_calibrated using calibration interval
    next_cal_date = None
    if last_cal_date:
        cal_interval = get_calibration_interval(db)
        next_cal_date = last_cal_date + timedelta(days=cal_interval)
    elif next_calibration_due:
        # Fallback: allow explicit setting if no last_calibrated
        try:
            next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD")

    # Capture original values so history is only written on actual change
    old_note = unit.note
    old_deployed = unit.deployed
    old_retired = unit.retired

    # Update all fields
    unit.device_type = device_type
    unit.unit_type = unit_type
    unit.deployed = deployed_bool
    unit.retired = retired_bool
    unit.note = note
    unit.project_id = project_id
    unit.location = location
    unit.address = address
    unit.coordinates = coordinates
    unit.last_updated = datetime.utcnow()

    # Seismograph-specific fields
    unit.last_calibrated = last_cal_date
    unit.next_calibration_due = next_cal_date
    unit.deployed_with_modem_id = deployed_with_modem_id if deployed_with_modem_id else None

    # Auto-fill location data from the paired modem for any fields left empty
    if deployed_with_modem_id:
        modem = db.query(RosterUnit).filter(
            RosterUnit.id == deployed_with_modem_id,
            RosterUnit.device_type == "modem"
        ).first()
        if modem:
            # Only fill if the device field is empty
            if not unit.location and modem.location:
                unit.location = modem.location
            if not unit.address and modem.address:
                unit.address = modem.address
            if not unit.coordinates and modem.coordinates:
                unit.coordinates = modem.coordinates
            if not unit.project_id and modem.project_id:
                unit.project_id = modem.project_id

    # Modem-specific fields (empty strings normalized to NULL)
    unit.ip_address = ip_address if ip_address else None
    unit.phone_number = phone_number if phone_number else None
    unit.hardware_model = hardware_model if hardware_model else None
    unit.deployment_type = deployment_type if deployment_type else None
    unit.deployed_with_unit_id = deployed_with_unit_id if deployed_with_unit_id else None

    # Sound Level Meter-specific fields
    unit.slm_host = slm_host if slm_host else None
    unit.slm_tcp_port = slm_tcp_port_int
    unit.slm_ftp_port = slm_ftp_port_int
    unit.slm_model = slm_model if slm_model else None
    unit.slm_serial_number = slm_serial_number if slm_serial_number else None
    unit.slm_frequency_weighting = slm_frequency_weighting if slm_frequency_weighting else None
    unit.slm_time_weighting = slm_time_weighting if slm_time_weighting else None
    unit.slm_measurement_range = slm_measurement_range if slm_measurement_range else None

    # Record history entries for changed fields
    if old_note != note:
        record_history(db, unit_id, "note_change", "note", old_note, note, "manual")

    # BUG FIX: compare the parsed booleans, not the raw form strings.
    # Previously `old_deployed != deployed` compared bool to str (always
    # unequal when the field was submitted) and `"deployed" if deployed`
    # mislabeled the entry because the string "false" is truthy.
    if old_deployed != deployed_bool:
        status_text = "deployed" if deployed_bool else "benched"
        old_status_text = "deployed" if old_deployed else "benched"
        record_history(db, unit_id, "deployed_change", "deployed", old_status_text, status_text, "manual")

    if old_retired != retired_bool:
        status_text = "retired" if retired_bool else "active"
        old_status_text = "retired" if old_retired else "active"
        record_history(db, unit_id, "retired_change", "retired", old_status_text, status_text, "manual")

    # Handle cascade to paired device
    cascaded_unit_id = None
    if cascade_to_unit_id and cascade_to_unit_id.strip():
        paired_unit = db.query(RosterUnit).filter(RosterUnit.id == cascade_to_unit_id).first()
        if paired_unit:
            cascaded_unit_id = paired_unit.id

            # Cascade deployed status
            if cascade_deployed in ['true', 'True', '1', 'yes']:
                old_paired_deployed = paired_unit.deployed
                paired_unit.deployed = deployed_bool
                paired_unit.last_updated = datetime.utcnow()
                if old_paired_deployed != deployed_bool:
                    status_text = "deployed" if deployed_bool else "benched"
                    old_status_text = "deployed" if old_paired_deployed else "benched"
                    record_history(db, paired_unit.id, "deployed_change", "deployed",
                                   old_status_text, status_text, f"cascade from {unit_id}")

            # Cascade retired status
            if cascade_retired in ['true', 'True', '1', 'yes']:
                old_paired_retired = paired_unit.retired
                paired_unit.retired = retired_bool
                paired_unit.last_updated = datetime.utcnow()
                if old_paired_retired != retired_bool:
                    status_text = "retired" if retired_bool else "active"
                    old_status_text = "retired" if old_paired_retired else "active"
                    record_history(db, paired_unit.id, "retired_change", "retired",
                                   old_status_text, status_text, f"cascade from {unit_id}")

            # Cascade project
            if cascade_project in ['true', 'True', '1', 'yes']:
                old_paired_project = paired_unit.project_id
                paired_unit.project_id = project_id
                paired_unit.last_updated = datetime.utcnow()
                if old_paired_project != project_id:
                    record_history(db, paired_unit.id, "project_change", "project_id",
                                   old_paired_project or "", project_id or "", f"cascade from {unit_id}")

            # Cascade address/location
            if cascade_location in ['true', 'True', '1', 'yes']:
                old_paired_address = paired_unit.address
                old_paired_location = paired_unit.location
                paired_unit.address = address
                paired_unit.location = location
                paired_unit.last_updated = datetime.utcnow()
                if old_paired_address != address:
                    record_history(db, paired_unit.id, "address_change", "address",
                                   old_paired_address or "", address or "", f"cascade from {unit_id}")

            # Cascade coordinates
            if cascade_coordinates in ['true', 'True', '1', 'yes']:
                old_paired_coords = paired_unit.coordinates
                paired_unit.coordinates = coordinates
                paired_unit.last_updated = datetime.utcnow()
                if old_paired_coords != coordinates:
                    record_history(db, paired_unit.id, "coordinates_change", "coordinates",
                                   old_paired_coords or "", coordinates or "", f"cascade from {unit_id}")

            # Cascade note
            if cascade_note in ['true', 'True', '1', 'yes']:
                old_paired_note = paired_unit.note
                paired_unit.note = note
                paired_unit.last_updated = datetime.utcnow()
                if old_paired_note != note:
                    record_history(db, paired_unit.id, "note_change", "note",
                                   old_paired_note or "", note or "", f"cascade from {unit_id}")

    db.commit()

    # Sync SLM polling config to SLMM when deployed/retired status changes
    # This ensures benched units stop being polled
    if device_type == "slm" and (old_deployed != deployed_bool or old_retired != retired_bool):
        db.refresh(unit)  # Refresh to get committed values
        try:
            await sync_slm_to_slmm(unit)
            logger.info(f"Synced SLM {unit_id} polling config to SLMM (deployed={deployed_bool}, retired={retired_bool})")
        except Exception as e:
            logger.warning(f"Failed to sync SLM {unit_id} polling config to SLMM: {e}")

    response = {"message": "Unit updated", "id": unit_id, "device_type": device_type}
    if cascaded_unit_id:
        response["cascaded_to"] = cascaded_unit_id
    return response
|
|
|
|
|
|
@router.post("/set-deployed/{unit_id}")
async def set_deployed(unit_id: str, deployed: bool = Form(...), db: Session = Depends(get_db)):
    """Set a unit's deployed flag, recording history and syncing SLMs to SLMM."""
    unit = get_or_create_roster_unit(db, unit_id)
    previous = unit.deployed
    unit.deployed = deployed
    unit.last_updated = datetime.utcnow()

    changed = previous != deployed
    if changed:
        record_history(
            db=db,
            unit_id=unit_id,
            change_type="deployed_change",
            field_name="deployed",
            old_value="deployed" if previous else "benched",
            new_value="deployed" if deployed else "benched",
            source="manual"
        )

    db.commit()

    # Benched SLMs must stop being polled, so push the new state to SLMM.
    if unit.device_type == "slm" and changed:
        db.refresh(unit)
        try:
            await sync_slm_to_slmm(unit)
            logger.info(f"Synced SLM {unit_id} polling config to SLMM (deployed={deployed})")
        except Exception as e:
            logger.warning(f"Failed to sync SLM {unit_id} polling config to SLMM: {e}")

    return {"message": "Updated", "id": unit_id, "deployed": deployed}
|
|
|
|
|
|
@router.post("/set-retired/{unit_id}")
async def set_retired(unit_id: str, retired: bool = Form(...), db: Session = Depends(get_db)):
    """Set a unit's retired flag, recording history and syncing SLMs to SLMM."""
    unit = get_or_create_roster_unit(db, unit_id)
    previous = unit.retired
    unit.retired = retired
    unit.last_updated = datetime.utcnow()

    changed = previous != retired
    if changed:
        record_history(
            db=db,
            unit_id=unit_id,
            change_type="retired_change",
            field_name="retired",
            old_value="retired" if previous else "active",
            new_value="retired" if retired else "active",
            source="manual"
        )

    db.commit()

    # Retired SLMs must stop being polled, so push the new state to SLMM.
    if unit.device_type == "slm" and changed:
        db.refresh(unit)
        try:
            await sync_slm_to_slmm(unit)
            logger.info(f"Synced SLM {unit_id} polling config to SLMM (retired={retired})")
        except Exception as e:
            logger.warning(f"Failed to sync SLM {unit_id} polling config to SLMM: {e}")

    return {"message": "Updated", "id": unit_id, "retired": retired}
|
|
|
|
|
|
@router.delete("/{unit_id}")
async def delete_roster_unit(unit_id: str, db: Session = Depends(get_db)):
    """
    Permanently delete a unit from the database.

    Checks the roster, emitters, and ignored_units tables and removes the
    row from every table that contains the id. SLM devices are additionally
    removed from SLMM (best-effort) so background polling stops.
    """
    found_any = False
    slm_device = False

    roster_row = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if roster_row:
        slm_device = roster_row.device_type == "slm"
        db.delete(roster_row)
        found_any = True

    emitter_row = db.query(Emitter).filter(Emitter.id == unit_id).first()
    if emitter_row:
        db.delete(emitter_row)
        found_any = True

    ignored_row = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first()
    if ignored_row:
        db.delete(ignored_row)
        found_any = True

    # Nothing matched in any table.
    if not found_any:
        raise HTTPException(status_code=404, detail="Unit not found")

    db.commit()

    # Best-effort SLMM cleanup; a 404 means SLMM never knew about the unit.
    if slm_device:
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.delete(f"{SLMM_BASE_URL}/api/nl43/{unit_id}/config")
            if response.status_code in [200, 404]:
                logger.info(f"Deleted SLM {unit_id} from SLMM")
            else:
                logger.warning(f"Failed to delete SLM {unit_id} from SLMM: {response.status_code}")
        except Exception as e:
            logger.error(f"Error deleting SLM {unit_id} from SLMM: {e}")

    return {"message": "Unit deleted", "id": unit_id}
|
|
|
|
|
|
@router.post("/set-note/{unit_id}")
def set_note(unit_id: str, note: str = Form(""), db: Session = Depends(get_db)):
    """Replace a unit's note (creating the unit if needed), logging history."""
    unit = get_or_create_roster_unit(db, unit_id)
    previous_note = unit.note
    unit.note = note
    unit.last_updated = datetime.utcnow()

    # Only write a history entry when the note actually changed.
    if previous_note != note:
        record_history(
            db=db,
            unit_id=unit_id,
            change_type="note_change",
            field_name="note",
            old_value=previous_note,
            new_value=note,
            source="manual"
        )

    db.commit()
    return {"message": "Updated", "id": unit_id, "note": note}
|
|
|
|
|
|
def _parse_bool(value: str) -> bool:
|
|
"""Parse boolean from CSV string value."""
|
|
return value.lower() in ('true', '1', 'yes') if value else False
|
|
|
|
|
|
def _parse_int(value: str) -> int | None:
|
|
"""Parse integer from CSV string value, return None if empty or invalid."""
|
|
if not value or not value.strip():
|
|
return None
|
|
try:
|
|
return int(value.strip())
|
|
except ValueError:
|
|
return None
|
|
|
|
|
|
def _parse_date(value: str) -> date | None:
|
|
"""Parse date from CSV string value (YYYY-MM-DD format)."""
|
|
if not value or not value.strip():
|
|
return None
|
|
try:
|
|
return datetime.strptime(value.strip(), '%Y-%m-%d').date()
|
|
except ValueError:
|
|
return None
|
|
|
|
|
|
def _get_csv_value(row: dict, key: str, default=None):
|
|
"""Get value from CSV row, return default if empty."""
|
|
value = row.get(key, '').strip() if row.get(key) else ''
|
|
return value if value else default
|
|
|
|
|
|
@router.post("/import-csv")
async def import_csv(
    file: UploadFile = File(...),
    update_existing: bool = Form(True),
    db: Session = Depends(get_db)
):
    """
    Import roster units from CSV file.

    Expected CSV columns (unit_id is required, others are optional):

    Common fields (all device types):
    - unit_id: Unique identifier for the unit (REQUIRED)
    - device_type: "seismograph", "modem", or "slm" (default: "seismograph")
    - unit_type: Sub-type (e.g., "series3", "series4" for seismographs)
    - deployed: Boolean (true/false/yes/no/1/0)
    - retired: Boolean
    - note: Notes about the unit
    - project_id: Project identifier
    - location: Location description
    - address: Street address
    - coordinates: GPS coordinates (lat;lon or lat,lon)

    Seismograph-specific:
    - last_calibrated: Date (YYYY-MM-DD)
    - next_calibration_due: Date (YYYY-MM-DD)
    - deployed_with_modem_id: ID of paired modem

    Modem-specific:
    - ip_address: Device IP address
    - phone_number: SIM card phone number
    - hardware_model: Hardware model (e.g., IBR900, RV55)

    SLM-specific:
    - slm_host: Device IP or hostname
    - slm_tcp_port: TCP control port (default 2255)
    - slm_ftp_port: FTP port (default 21)
    - slm_model: Device model (NL-43, NL-53)
    - slm_serial_number: Serial number
    - slm_frequency_weighting: A, C, or Z
    - slm_time_weighting: F (Fast), S (Slow), I (Impulse)
    - slm_measurement_range: e.g., "30-130 dB"

    Lines starting with # are treated as comments and skipped.

    Args:
        file: CSV file upload
        update_existing: If True, update existing units; if False, skip them

    Returns:
        Summary dict with added/updated/skipped/error counts plus per-unit details.

    Raises:
        HTTPException 400: missing/non-CSV filename, non-UTF-8 content, or
            validation failures (empty or duplicate unit_ids).
        HTTPException 500: database commit failure (all changes rolled back).
    """
    # Filename may be absent on some clients; compare case-insensitively so
    # "ROSTER.CSV" is accepted too.
    if not file.filename or not file.filename.lower().endswith('.csv'):
        raise HTTPException(status_code=400, detail="File must be a CSV")

    # Read file content
    contents = await file.read()
    try:
        csv_text = contents.decode('utf-8')
    except UnicodeDecodeError:
        # Reject binary / mis-encoded uploads with a client error instead of a 500
        raise HTTPException(status_code=400, detail="File must be UTF-8 encoded")

    # Filter out comment lines (starting with #)
    lines = csv_text.split('\n')
    filtered_lines = [line for line in lines if not line.strip().startswith('#')]
    csv_text = '\n'.join(filtered_lines)

    # First pass: validate for duplicates and empty unit_ids before touching the DB
    csv_reader = csv.DictReader(io.StringIO(csv_text))
    seen_unit_ids = {}  # unit_id -> list of row numbers
    empty_unit_id_rows = []

    # Row numbers start at 2 to account for the header row
    for row_num, row in enumerate(csv_reader, start=2):
        # "or ''" guards against None cells (DictReader fills short rows with None)
        unit_id = (row.get('unit_id') or '').strip()
        if not unit_id:
            empty_unit_id_rows.append(row_num)
        else:
            seen_unit_ids.setdefault(unit_id, []).append(row_num)

    # Check for validation errors
    validation_errors = []

    # Report empty unit_ids
    if empty_unit_id_rows:
        validation_errors.append(f"Empty unit_id on row(s): {', '.join(map(str, empty_unit_id_rows))}")

    # Report duplicates
    duplicates = {uid: rows for uid, rows in seen_unit_ids.items() if len(rows) > 1}
    if duplicates:
        for uid, rows in duplicates.items():
            validation_errors.append(f"Duplicate unit_id '{uid}' on rows: {', '.join(map(str, rows))}")

    if validation_errors:
        raise HTTPException(
            status_code=400,
            detail="CSV validation failed:\n" + "\n".join(validation_errors)
        )

    # Second pass: actually import the data
    csv_reader = csv.DictReader(io.StringIO(csv_text))

    results = {
        "added": [],
        "updated": [],
        "skipped": [],
        "errors": []
    }

    for row_num, row in enumerate(csv_reader, start=2):  # Start at 2 to account for header
        try:
            # Validate required field
            unit_id = (row.get('unit_id') or '').strip()
            if not unit_id:
                results["errors"].append({
                    "row": row_num,
                    "error": "Missing required field: unit_id"
                })
                continue

            # Determine device type. The "seismograph" default is applied only for
            # brand-new units (below), so a CSV without a device_type column can no
            # longer silently reset existing modems/SLMs back to "seismograph".
            device_type = _get_csv_value(row, 'device_type')

            # Check if unit exists
            existing_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()

            if existing_unit:
                if not update_existing:
                    results["skipped"].append(unit_id)
                    continue

                # Update existing unit - common fields (blank CSV cells keep the
                # unit's current value rather than wiping it)
                existing_unit.device_type = device_type or existing_unit.device_type or 'seismograph'
                existing_unit.unit_type = _get_csv_value(row, 'unit_type', existing_unit.unit_type or 'series3')
                existing_unit.deployed = _parse_bool(row.get('deployed', '')) if row.get('deployed') else existing_unit.deployed
                existing_unit.retired = _parse_bool(row.get('retired', '')) if row.get('retired') else existing_unit.retired
                existing_unit.note = _get_csv_value(row, 'note', existing_unit.note)
                existing_unit.project_id = _get_csv_value(row, 'project_id', existing_unit.project_id)
                existing_unit.location = _get_csv_value(row, 'location', existing_unit.location)
                existing_unit.address = _get_csv_value(row, 'address', existing_unit.address)
                existing_unit.coordinates = _get_csv_value(row, 'coordinates', existing_unit.coordinates)
                existing_unit.last_updated = datetime.utcnow()

                # Seismograph-specific fields
                if row.get('last_calibrated'):
                    last_cal = _parse_date(row.get('last_calibrated'))
                    existing_unit.last_calibrated = last_cal
                    # Auto-calculate next_calibration_due using calibration interval
                    if last_cal:
                        cal_interval = get_calibration_interval(db)
                        existing_unit.next_calibration_due = last_cal + timedelta(days=cal_interval)
                elif row.get('next_calibration_due'):
                    # Only use explicit next_calibration_due if no last_calibrated
                    existing_unit.next_calibration_due = _parse_date(row.get('next_calibration_due'))
                if row.get('deployed_with_modem_id'):
                    existing_unit.deployed_with_modem_id = _get_csv_value(row, 'deployed_with_modem_id')

                # Modem-specific fields
                if row.get('ip_address'):
                    existing_unit.ip_address = _get_csv_value(row, 'ip_address')
                if row.get('phone_number'):
                    existing_unit.phone_number = _get_csv_value(row, 'phone_number')
                if row.get('hardware_model'):
                    existing_unit.hardware_model = _get_csv_value(row, 'hardware_model')
                if row.get('deployment_type'):
                    existing_unit.deployment_type = _get_csv_value(row, 'deployment_type')
                if row.get('deployed_with_unit_id'):
                    existing_unit.deployed_with_unit_id = _get_csv_value(row, 'deployed_with_unit_id')

                # SLM-specific fields
                if row.get('slm_host'):
                    existing_unit.slm_host = _get_csv_value(row, 'slm_host')
                if row.get('slm_tcp_port'):
                    existing_unit.slm_tcp_port = _parse_int(row.get('slm_tcp_port'))
                if row.get('slm_ftp_port'):
                    existing_unit.slm_ftp_port = _parse_int(row.get('slm_ftp_port'))
                if row.get('slm_model'):
                    existing_unit.slm_model = _get_csv_value(row, 'slm_model')
                if row.get('slm_serial_number'):
                    existing_unit.slm_serial_number = _get_csv_value(row, 'slm_serial_number')
                if row.get('slm_frequency_weighting'):
                    existing_unit.slm_frequency_weighting = _get_csv_value(row, 'slm_frequency_weighting')
                if row.get('slm_time_weighting'):
                    existing_unit.slm_time_weighting = _get_csv_value(row, 'slm_time_weighting')
                if row.get('slm_measurement_range'):
                    existing_unit.slm_measurement_range = _get_csv_value(row, 'slm_measurement_range')

                results["updated"].append(unit_id)
            else:
                # Calculate next_calibration_due from last_calibrated
                last_cal = _parse_date(row.get('last_calibrated', ''))
                if last_cal:
                    cal_interval = get_calibration_interval(db)
                    next_cal = last_cal + timedelta(days=cal_interval)
                else:
                    next_cal = _parse_date(row.get('next_calibration_due', ''))

                # Create new unit with all fields
                new_unit = RosterUnit(
                    id=unit_id,
                    device_type=device_type or 'seismograph',
                    unit_type=_get_csv_value(row, 'unit_type', 'series3'),
                    deployed=_parse_bool(row.get('deployed', '')),
                    retired=_parse_bool(row.get('retired', '')),
                    note=_get_csv_value(row, 'note', ''),
                    project_id=_get_csv_value(row, 'project_id'),
                    location=_get_csv_value(row, 'location'),
                    address=_get_csv_value(row, 'address'),
                    coordinates=_get_csv_value(row, 'coordinates'),
                    last_updated=datetime.utcnow(),
                    # Seismograph fields - auto-calc next_calibration_due from last_calibrated
                    last_calibrated=last_cal,
                    next_calibration_due=next_cal,
                    deployed_with_modem_id=_get_csv_value(row, 'deployed_with_modem_id'),
                    # Modem fields
                    ip_address=_get_csv_value(row, 'ip_address'),
                    phone_number=_get_csv_value(row, 'phone_number'),
                    hardware_model=_get_csv_value(row, 'hardware_model'),
                    deployment_type=_get_csv_value(row, 'deployment_type'),
                    deployed_with_unit_id=_get_csv_value(row, 'deployed_with_unit_id'),
                    # SLM fields
                    slm_host=_get_csv_value(row, 'slm_host'),
                    slm_tcp_port=_parse_int(row.get('slm_tcp_port', '')),
                    slm_ftp_port=_parse_int(row.get('slm_ftp_port', '')),
                    slm_model=_get_csv_value(row, 'slm_model'),
                    slm_serial_number=_get_csv_value(row, 'slm_serial_number'),
                    slm_frequency_weighting=_get_csv_value(row, 'slm_frequency_weighting'),
                    slm_time_weighting=_get_csv_value(row, 'slm_time_weighting'),
                    slm_measurement_range=_get_csv_value(row, 'slm_measurement_range'),
                )
                db.add(new_unit)
                results["added"].append(unit_id)

        except Exception as e:
            # Per-row failures are collected so one bad row doesn't abort the import
            results["errors"].append({
                "row": row_num,
                "unit_id": row.get('unit_id', 'unknown'),
                "error": str(e)
            })

    # Commit all changes in one transaction; roll back everything on failure
    try:
        db.commit()
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")

    return {
        "message": "CSV import completed",
        "summary": {
            "added": len(results["added"]),
            "updated": len(results["updated"]),
            "skipped": len(results["skipped"]),
            "errors": len(results["errors"])
        },
        "details": results
    }
|
|
|
|
|
|
@router.post("/ignore/{unit_id}")
def ignore_unit(unit_id: str, reason: str = Form(""), db: Session = Depends(get_db)):
    """Add a unit to the ignore list to suppress it from unknown emitters.

    Raises:
        HTTPException 400: the unit is already on the ignore list.
    """
    already_ignored = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first()
    if already_ignored:
        raise HTTPException(status_code=400, detail="Unit already ignored")

    entry = IgnoredUnit(
        id=unit_id,
        reason=reason,
        ignored_at=datetime.utcnow()
    )
    db.add(entry)
    db.commit()
    return {"message": "Unit ignored", "id": unit_id}
|
|
|
|
|
|
@router.delete("/ignore/{unit_id}")
def unignore_unit(unit_id: str, db: Session = Depends(get_db)):
    """Remove a unit from the ignore list.

    Raises:
        HTTPException 404: the unit is not on the ignore list.
    """
    entry = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first()
    if entry is None:
        raise HTTPException(status_code=404, detail="Unit not in ignore list")

    db.delete(entry)
    db.commit()
    return {"message": "Unit unignored", "id": unit_id}
|
|
|
|
|
|
@router.get("/ignored")
def list_ignored_units(db: Session = Depends(get_db)):
    """Get list of all ignored units with their reason and ignore timestamp."""
    def _serialize(unit):
        # Flatten one IgnoredUnit row into the response shape
        return {
            "id": unit.id,
            "reason": unit.reason,
            "ignored_at": unit.ignored_at.isoformat()
        }

    return {"ignored": [_serialize(unit) for unit in db.query(IgnoredUnit).all()]}
|
|
|
|
|
|
@router.get("/history/{unit_id}")
def get_unit_history(unit_id: str, db: Session = Depends(get_db)):
    """
    Get complete history timeline for a unit.

    Returns all historical changes ordered by most recent first.
    An unknown unit_id simply yields an empty history list.
    """
    entries = (
        db.query(UnitHistory)
        .filter(UnitHistory.unit_id == unit_id)
        .order_by(UnitHistory.changed_at.desc())
        .all()
    )

    timeline = []
    for entry in entries:
        timeline.append({
            "id": entry.id,
            "change_type": entry.change_type,
            "field_name": entry.field_name,
            "old_value": entry.old_value,
            "new_value": entry.new_value,
            "changed_at": entry.changed_at.isoformat(),
            "source": entry.source,
            "notes": entry.notes
        })

    return {"unit_id": unit_id, "history": timeline}
|
|
|
|
|
|
@router.delete("/history/{history_id}")
def delete_history_entry(history_id: int, db: Session = Depends(get_db)):
    """
    Delete a specific history entry by ID.

    Allows manual cleanup of old history entries.

    Raises:
        HTTPException 404: no history entry with the given ID exists.
    """
    entry = db.query(UnitHistory).filter(UnitHistory.id == history_id).first()
    if entry is None:
        raise HTTPException(status_code=404, detail="History entry not found")

    db.delete(entry)
    db.commit()
    return {"message": "History entry deleted", "id": history_id}
|
|
|
|
|
|
@router.post("/pair-devices")
async def pair_devices(
    request: Request,
    db: Session = Depends(get_db)
):
    """
    Create a bidirectional pairing between a recorder (seismograph/SLM) and a modem.

    Sets:
    - recorder.deployed_with_modem_id = modem_id
    - modem.deployed_with_unit_id = recorder_id

    Also clears any previous pairings for both devices.

    Request body (JSON): {"recorder_id": str, "modem_id": str} -- both required.

    Raises:
        HTTPException 400: missing ids, or the device types are wrong
            (recorder side must not be a modem; modem side must be one).
        HTTPException 404: either unit is not found in the roster.
    """
    data = await request.json()
    recorder_id = data.get("recorder_id")
    modem_id = data.get("modem_id")

    if not recorder_id or not modem_id:
        raise HTTPException(status_code=400, detail="Both recorder_id and modem_id are required")

    # Look up both units; both must already exist in the roster
    recorder = db.query(RosterUnit).filter(RosterUnit.id == recorder_id).first()
    modem = db.query(RosterUnit).filter(RosterUnit.id == modem_id).first()

    if not recorder:
        raise HTTPException(status_code=404, detail=f"Recorder {recorder_id} not found in roster")
    if not modem:
        raise HTTPException(status_code=404, detail=f"Modem {modem_id} not found in roster")

    # Validate device types: any non-modem roster unit may act as the recorder side
    if recorder.device_type == "modem":
        raise HTTPException(status_code=400, detail=f"{recorder_id} is a modem, not a recorder")
    if modem.device_type != "modem":
        raise HTTPException(status_code=400, detail=f"{modem_id} is not a modem (type: {modem.device_type})")

    # Clear any previous pairings.
    # If recorder was paired with a different modem, clear that modem's link.
    # History is recorded BEFORE nulling the field so old_value is preserved;
    # the reverse link is only cleared if it actually points back at this recorder.
    if recorder.deployed_with_modem_id and recorder.deployed_with_modem_id != modem_id:
        old_modem = db.query(RosterUnit).filter(RosterUnit.id == recorder.deployed_with_modem_id).first()
        if old_modem and old_modem.deployed_with_unit_id == recorder_id:
            record_history(db, old_modem.id, "update", "deployed_with_unit_id",
                           old_modem.deployed_with_unit_id, None, "pair_devices", f"Cleared by new pairing")
            old_modem.deployed_with_unit_id = None

    # If modem was paired with a different recorder, clear that recorder's link
    if modem.deployed_with_unit_id and modem.deployed_with_unit_id != recorder_id:
        old_recorder = db.query(RosterUnit).filter(RosterUnit.id == modem.deployed_with_unit_id).first()
        if old_recorder and old_recorder.deployed_with_modem_id == modem_id:
            record_history(db, old_recorder.id, "update", "deployed_with_modem_id",
                           old_recorder.deployed_with_modem_id, None, "pair_devices", f"Cleared by new pairing")
            old_recorder.deployed_with_modem_id = None

    # Capture pre-pairing values before overwriting, for the history entries below
    old_recorder_modem = recorder.deployed_with_modem_id
    old_modem_unit = modem.deployed_with_unit_id

    # Set the new pairing
    recorder.deployed_with_modem_id = modem_id
    modem.deployed_with_unit_id = recorder_id

    # Record history (skipped when the value is unchanged, to avoid no-op entries)
    if old_recorder_modem != modem_id:
        record_history(db, recorder_id, "update", "deployed_with_modem_id",
                       old_recorder_modem, modem_id, "pair_devices", f"Paired with modem")
    if old_modem_unit != recorder_id:
        record_history(db, modem_id, "update", "deployed_with_unit_id",
                       old_modem_unit, recorder_id, "pair_devices", f"Paired with recorder")

    db.commit()

    logger.info(f"Paired {recorder_id} with modem {modem_id}")

    # If SLM, sync to SLMM cache (after commit, so the cache reflects persisted state).
    # NOTE(review): sync_slm_to_slmm_cache is not among this file's visible imports
    # (the header imports sync_slm_to_slmm) -- confirm it is defined earlier in this
    # module, otherwise this branch raises NameError for SLM recorders.
    if recorder.device_type == "slm":
        await sync_slm_to_slmm_cache(
            unit_id=recorder_id,
            host=recorder.slm_host,
            tcp_port=recorder.slm_tcp_port,
            ftp_port=recorder.slm_ftp_port,
            deployed_with_modem_id=modem_id,
            db=db
        )

    return {
        "success": True,
        "message": f"Paired {recorder_id} with {modem_id}",
        "recorder_id": recorder_id,
        "modem_id": modem_id
    }
|
|
|
|
|
|
@router.post("/unpair-devices")
async def unpair_devices(
    request: Request,
    db: Session = Depends(get_db)
):
    """
    Remove the bidirectional pairing between a recorder and modem.

    Clears:
    - recorder.deployed_with_modem_id
    - modem.deployed_with_unit_id

    Request body (JSON): {"recorder_id": str, "modem_id": str} -- both required.
    Returns success=False (not an error) when no matching pairing exists.
    """
    payload = await request.json()
    recorder_id = payload.get("recorder_id")
    modem_id = payload.get("modem_id")

    if not (recorder_id and modem_id):
        raise HTTPException(status_code=400, detail="Both recorder_id and modem_id are required")

    recorder = db.query(RosterUnit).filter(RosterUnit.id == recorder_id).first()
    modem = db.query(RosterUnit).filter(RosterUnit.id == modem_id).first()

    cleared = False

    # Only clear links that actually point at each other; history is recorded
    # before the field is nulled so the old value is preserved
    if recorder is not None and recorder.deployed_with_modem_id == modem_id:
        record_history(db, recorder_id, "update", "deployed_with_modem_id",
                       recorder.deployed_with_modem_id, None, "unpair_devices", "Unpairing")
        recorder.deployed_with_modem_id = None
        cleared = True

    if modem is not None and modem.deployed_with_unit_id == recorder_id:
        record_history(db, modem_id, "update", "deployed_with_unit_id",
                       modem.deployed_with_unit_id, None, "unpair_devices", "Unpairing")
        modem.deployed_with_unit_id = None
        cleared = True

    if not cleared:
        return {
            "success": False,
            "message": "No pairing found between these devices"
        }

    db.commit()
    logger.info(f"Unpaired {recorder_id} from modem {modem_id}")
    return {
        "success": True,
        "message": f"Unpaired {recorder_id} from {modem_id}"
    }
|