from fastapi import APIRouter, Depends, HTTPException, Form, UploadFile, File, Request from fastapi.exceptions import RequestValidationError from sqlalchemy.orm import Session from datetime import datetime, date import csv import io import logging import httpx import os from backend.database import get_db from backend.models import RosterUnit, IgnoredUnit, Emitter, UnitHistory router = APIRouter(prefix="/api/roster", tags=["roster-edit"]) logger = logging.getLogger(__name__) # SLMM backend URL for syncing device configs to cache SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100") def record_history(db: Session, unit_id: str, change_type: str, field_name: str = None, old_value: str = None, new_value: str = None, source: str = "manual", notes: str = None): """Helper function to record a change in unit history""" history_entry = UnitHistory( unit_id=unit_id, change_type=change_type, field_name=field_name, old_value=old_value, new_value=new_value, changed_at=datetime.utcnow(), source=source, notes=notes ) db.add(history_entry) # Note: caller is responsible for db.commit() def get_or_create_roster_unit(db: Session, unit_id: str): unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first() if not unit: unit = RosterUnit(id=unit_id) db.add(unit) db.commit() db.refresh(unit) return unit async def sync_slm_to_slmm_cache( unit_id: str, host: str = None, tcp_port: int = None, ftp_port: int = None, ftp_username: str = None, ftp_password: str = None, deployed_with_modem_id: str = None, db: Session = None ) -> dict: """ Sync SLM device configuration to SLMM backend cache. Terra-View is the source of truth for device configs. This function updates SLMM's config cache (NL43Config table) so SLMM can look up device connection info by unit_id without Terra-View passing host:port with every request. Args: unit_id: Unique identifier for the SLM device host: Direct IP address/hostname OR will be resolved from modem tcp_port: TCP control port (default: 2255) ftp_port: FTP port (default: 21) ftp_username: FTP username (optional) ftp_password: FTP password (optional) deployed_with_modem_id: If set, resolve modem IP as host db: Database session for modem lookup Returns: dict: {"success": bool, "message": str} """ # Resolve host from modem if assigned if deployed_with_modem_id and db: modem = db.query(RosterUnit).filter_by( id=deployed_with_modem_id, device_type="modem" ).first() if modem and modem.ip_address: host = modem.ip_address logger.info(f"Resolved host from modem {deployed_with_modem_id}: {host}") # Validate required fields if not host: logger.warning(f"Cannot sync SLM {unit_id} to SLMM: no host/IP address provided") return {"success": False, "message": "No host IP address available"} # Set defaults tcp_port = tcp_port or 2255 ftp_port = ftp_port or 21 # Build SLMM cache payload config_payload = { "host": host, "tcp_port": tcp_port, "tcp_enabled": True, "ftp_enabled": bool(ftp_username and ftp_password), "web_enabled": False } if ftp_username and ftp_password: config_payload["ftp_username"] = ftp_username config_payload["ftp_password"] = ftp_password # Call SLMM cache update API slmm_url = f"{SLMM_BASE_URL}/api/nl43/{unit_id}/config" try: async with httpx.AsyncClient(timeout=10.0) as client: response = await client.put(slmm_url, json=config_payload) if response.status_code in [200, 201]: logger.info(f"Successfully synced SLM {unit_id} to SLMM cache") return {"success": True, "message": "Device config cached in SLMM"} else: logger.error(f"SLMM cache sync failed for {unit_id}: HTTP {response.status_code}") return {"success": False, "message": f"SLMM returned status {response.status_code}"} except httpx.ConnectError: logger.error(f"Cannot connect to SLMM service at {SLMM_BASE_URL}") return {"success": False, "message": "SLMM service unavailable"} except Exception as e: logger.error(f"Error syncing SLM {unit_id} to SLMM: {e}") return {"success": False, "message": str(e)} @router.post("/add") async def add_roster_unit( id: str = Form(...), device_type: str = Form("seismograph"), unit_type: str = Form("series3"), deployed: str = Form(None), retired: str = Form(None), note: str = Form(""), project_id: str = Form(None), location: str = Form(None), address: str = Form(None), coordinates: str = Form(None), # Seismograph-specific fields last_calibrated: str = Form(None), next_calibration_due: str = Form(None), deployed_with_modem_id: str = Form(None), # Modem-specific fields ip_address: str = Form(None), phone_number: str = Form(None), hardware_model: str = Form(None), # Sound Level Meter-specific fields slm_host: str = Form(None), slm_tcp_port: str = Form(None), slm_ftp_port: str = Form(None), slm_model: str = Form(None), slm_serial_number: str = Form(None), slm_frequency_weighting: str = Form(None), slm_time_weighting: str = Form(None), slm_measurement_range: str = Form(None), db: Session = Depends(get_db) ): logger.info(f"Adding unit: id={id}, device_type={device_type}, deployed={deployed}, retired={retired}") # Convert boolean strings to actual booleans deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False # Convert port strings to integers slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None if db.query(RosterUnit).filter(RosterUnit.id == id).first(): raise HTTPException(status_code=400, detail="Unit already exists") # Parse date fields if provided last_cal_date = None if last_calibrated: try: last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date() except ValueError: raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD") next_cal_date = None if next_calibration_due: try: next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date() except ValueError: raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD") unit = RosterUnit( id=id, device_type=device_type, unit_type=unit_type, deployed=deployed_bool, retired=retired_bool, note=note, project_id=project_id, location=location, address=address, coordinates=coordinates, last_updated=datetime.utcnow(), # Seismograph-specific fields last_calibrated=last_cal_date, next_calibration_due=next_cal_date, deployed_with_modem_id=deployed_with_modem_id if deployed_with_modem_id else None, # Modem-specific fields ip_address=ip_address if ip_address else None, phone_number=phone_number if phone_number else None, hardware_model=hardware_model if hardware_model else None, # Sound Level Meter-specific fields slm_host=slm_host if slm_host else None, slm_tcp_port=slm_tcp_port_int, slm_ftp_port=slm_ftp_port_int, slm_model=slm_model if slm_model else None, slm_serial_number=slm_serial_number if slm_serial_number else None, slm_frequency_weighting=slm_frequency_weighting if slm_frequency_weighting else None, slm_time_weighting=slm_time_weighting if slm_time_weighting else None, slm_measurement_range=slm_measurement_range if slm_measurement_range else None, ) db.add(unit) db.commit() # If sound level meter, sync config to SLMM cache if device_type == "slm": logger.info(f"Syncing SLM {id} config to SLMM cache...") result = await sync_slm_to_slmm_cache( unit_id=id, host=slm_host, tcp_port=slm_tcp_port_int, ftp_port=slm_ftp_port_int, deployed_with_modem_id=deployed_with_modem_id, db=db ) if not result["success"]: logger.warning(f"SLMM cache sync warning for {id}: {result['message']}") # Don't fail the operation - device is still added to Terra-View roster # User can manually sync later or SLMM will be synced on next config update return {"message": "Unit added", "id": id, "device_type": device_type} @router.get("/modems") def get_modems_list(db: Session = Depends(get_db)): """Get list of all modem units for dropdown selection""" modems = db.query(RosterUnit).filter_by(device_type="modem", retired=False).order_by(RosterUnit.id).all() return [ { "id": modem.id, "ip_address": modem.ip_address, "phone_number": modem.phone_number, "hardware_model": modem.hardware_model, "deployed": modem.deployed } for modem in modems ] @router.get("/{unit_id}") def get_roster_unit(unit_id: str, db: Session = Depends(get_db)): """Get a single roster unit by ID""" unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first() if not unit: raise HTTPException(status_code=404, detail="Unit not found") return { "id": unit.id, "device_type": unit.device_type or "seismograph", "unit_type": unit.unit_type, "deployed": unit.deployed, "retired": unit.retired, "note": unit.note or "", "project_id": unit.project_id or "", "location": unit.location or "", "address": unit.address or "", "coordinates": unit.coordinates or "", "last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else "", "next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else "", "deployed_with_modem_id": unit.deployed_with_modem_id or "", "ip_address": unit.ip_address or "", "phone_number": unit.phone_number or "", "hardware_model": unit.hardware_model or "", "slm_host": unit.slm_host or "", "slm_tcp_port": unit.slm_tcp_port or "", "slm_ftp_port": unit.slm_ftp_port or "", "slm_model": unit.slm_model or "", "slm_serial_number": unit.slm_serial_number or "", "slm_frequency_weighting": unit.slm_frequency_weighting or "", "slm_time_weighting": unit.slm_time_weighting or "", "slm_measurement_range": unit.slm_measurement_range or "", } @router.post("/edit/{unit_id}") def edit_roster_unit( unit_id: str, device_type: str = Form("seismograph"), unit_type: str = Form("series3"), deployed: str = Form(None), retired: str = Form(None), note: str = Form(""), project_id: str = Form(None), location: str = Form(None), address: str = Form(None), coordinates: str = Form(None), # Seismograph-specific fields last_calibrated: str = Form(None), next_calibration_due: str = Form(None), deployed_with_modem_id: str = Form(None), # Modem-specific fields ip_address: str = Form(None), phone_number: str = Form(None), hardware_model: str = Form(None), # Sound Level Meter-specific fields slm_host: str = Form(None), slm_tcp_port: str = Form(None), slm_ftp_port: str = Form(None), slm_model: str = Form(None), slm_serial_number: str = Form(None), slm_frequency_weighting: str = Form(None), slm_time_weighting: str = Form(None), slm_measurement_range: str = Form(None), db: Session = Depends(get_db) ): unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first() if not unit: raise HTTPException(status_code=404, detail="Unit not found") # Convert boolean strings to actual booleans deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False # Convert port strings to integers slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None # Parse date fields if provided last_cal_date = None if last_calibrated: try: last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date() except ValueError: raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD") next_cal_date = None if next_calibration_due: try: next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date() except ValueError: raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD") # Track changes for history old_note = unit.note old_deployed = unit.deployed old_retired = unit.retired # Update all fields unit.device_type = device_type unit.unit_type = unit_type unit.deployed = deployed_bool unit.retired = retired_bool unit.note = note unit.project_id = project_id unit.location = location unit.address = address unit.coordinates = coordinates unit.last_updated = datetime.utcnow() # Seismograph-specific fields unit.last_calibrated = last_cal_date unit.next_calibration_due = next_cal_date unit.deployed_with_modem_id = deployed_with_modem_id if deployed_with_modem_id else None # Modem-specific fields unit.ip_address = ip_address if ip_address else None unit.phone_number = phone_number if phone_number else None unit.hardware_model = hardware_model if hardware_model else None # Sound Level Meter-specific fields unit.slm_host = slm_host if slm_host else None unit.slm_tcp_port = slm_tcp_port_int unit.slm_ftp_port = slm_ftp_port_int unit.slm_model = slm_model if slm_model else None unit.slm_serial_number = slm_serial_number if slm_serial_number else None unit.slm_frequency_weighting = slm_frequency_weighting if slm_frequency_weighting else None unit.slm_time_weighting = slm_time_weighting if slm_time_weighting else None unit.slm_measurement_range = slm_measurement_range if slm_measurement_range else None # Record history entries for changed fields if old_note != note: record_history(db, unit_id, "note_change", "note", old_note, note, "manual") if old_deployed != deployed: status_text = "deployed" if deployed else "benched" old_status_text = "deployed" if old_deployed else "benched" record_history(db, unit_id, "deployed_change", "deployed", old_status_text, status_text, "manual") if old_retired != retired: status_text = "retired" if retired else "active" old_status_text = "retired" if old_retired else "active" record_history(db, unit_id, "retired_change", "retired", old_status_text, status_text, "manual") db.commit() return {"message": "Unit updated", "id": unit_id, "device_type": device_type} @router.post("/set-deployed/{unit_id}") def set_deployed(unit_id: str, deployed: bool = Form(...), db: Session = Depends(get_db)): unit = get_or_create_roster_unit(db, unit_id) old_deployed = unit.deployed unit.deployed = deployed unit.last_updated = datetime.utcnow() # Record history entry for deployed status change if old_deployed != deployed: status_text = "deployed" if deployed else "benched" old_status_text = "deployed" if old_deployed else "benched" record_history( db=db, unit_id=unit_id, change_type="deployed_change", field_name="deployed", old_value=old_status_text, new_value=status_text, source="manual" ) db.commit() return {"message": "Updated", "id": unit_id, "deployed": deployed} @router.post("/set-retired/{unit_id}") def set_retired(unit_id: str, retired: bool = Form(...), db: Session = Depends(get_db)): unit = get_or_create_roster_unit(db, unit_id) old_retired = unit.retired unit.retired = retired unit.last_updated = datetime.utcnow() # Record history entry for retired status change if old_retired != retired: status_text = "retired" if retired else "active" old_status_text = "retired" if old_retired else "active" record_history( db=db, unit_id=unit_id, change_type="retired_change", field_name="retired", old_value=old_status_text, new_value=status_text, source="manual" ) db.commit() return {"message": "Updated", "id": unit_id, "retired": retired} @router.delete("/{unit_id}") async def delete_roster_unit(unit_id: str, db: Session = Depends(get_db)): """ Permanently delete a unit from the database. Checks roster, emitters, and ignored_units tables and deletes from any table where the unit exists. For SLM devices, also removes from SLMM to stop background polling. """ deleted = False was_slm = False # Try to delete from roster table roster_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first() if roster_unit: was_slm = roster_unit.device_type == "slm" db.delete(roster_unit) deleted = True # Try to delete from emitters table emitter = db.query(Emitter).filter(Emitter.id == unit_id).first() if emitter: db.delete(emitter) deleted = True # Try to delete from ignored_units table ignored_unit = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first() if ignored_unit: db.delete(ignored_unit) deleted = True # If not found in any table, return error if not deleted: raise HTTPException(status_code=404, detail="Unit not found") db.commit() # If it was an SLM, also delete from SLMM if was_slm: try: async with httpx.AsyncClient(timeout=5.0) as client: response = await client.delete(f"{SLMM_BASE_URL}/api/nl43/{unit_id}/config") if response.status_code in [200, 404]: logger.info(f"Deleted SLM {unit_id} from SLMM") else: logger.warning(f"Failed to delete SLM {unit_id} from SLMM: {response.status_code}") except Exception as e: logger.error(f"Error deleting SLM {unit_id} from SLMM: {e}") return {"message": "Unit deleted", "id": unit_id} @router.post("/set-note/{unit_id}") def set_note(unit_id: str, note: str = Form(""), db: Session = Depends(get_db)): unit = get_or_create_roster_unit(db, unit_id) old_note = unit.note unit.note = note unit.last_updated = datetime.utcnow() # Record history entry for note change if old_note != note: record_history( db=db, unit_id=unit_id, change_type="note_change", field_name="note", old_value=old_note, new_value=note, source="manual" ) db.commit() return {"message": "Updated", "id": unit_id, "note": note} def _parse_bool(value: str) -> bool: """Parse boolean from CSV string value.""" return value.lower() in ('true', '1', 'yes') if value else False def _parse_int(value: str) -> int | None: """Parse integer from CSV string value, return None if empty or invalid.""" if not value or not value.strip(): return None try: return int(value.strip()) except ValueError: return None def _parse_date(value: str) -> date | None: """Parse date from CSV string value (YYYY-MM-DD format).""" if not value or not value.strip(): return None try: return datetime.strptime(value.strip(), '%Y-%m-%d').date() except ValueError: return None def _get_csv_value(row: dict, key: str, default=None): """Get value from CSV row, return default if empty.""" value = row.get(key, '').strip() if row.get(key) else '' return value if value else default @router.post("/import-csv") async def import_csv( file: UploadFile = File(...), update_existing: bool = Form(True), db: Session = Depends(get_db) ): """ Import roster units from CSV file. Expected CSV columns (unit_id is required, others are optional): Common fields (all device types): - unit_id: Unique identifier for the unit (REQUIRED) - device_type: "seismograph", "modem", or "slm" (default: "seismograph") - unit_type: Sub-type (e.g., "series3", "series4" for seismographs) - deployed: Boolean (true/false/yes/no/1/0) - retired: Boolean - note: Notes about the unit - project_id: Project identifier - location: Location description - address: Street address - coordinates: GPS coordinates (lat;lon or lat,lon) Seismograph-specific: - last_calibrated: Date (YYYY-MM-DD) - next_calibration_due: Date (YYYY-MM-DD) - deployed_with_modem_id: ID of paired modem Modem-specific: - ip_address: Device IP address - phone_number: SIM card phone number - hardware_model: Hardware model (e.g., IBR900, RV55) SLM-specific: - slm_host: Device IP or hostname - slm_tcp_port: TCP control port (default 2255) - slm_ftp_port: FTP port (default 21) - slm_model: Device model (NL-43, NL-53) - slm_serial_number: Serial number - slm_frequency_weighting: A, C, or Z - slm_time_weighting: F (Fast), S (Slow), I (Impulse) - slm_measurement_range: e.g., "30-130 dB" Lines starting with # are treated as comments and skipped. Args: file: CSV file upload update_existing: If True, update existing units; if False, skip them """ if not file.filename.endswith('.csv'): raise HTTPException(status_code=400, detail="File must be a CSV") # Read file content contents = await file.read() csv_text = contents.decode('utf-8') # Filter out comment lines (starting with #) lines = csv_text.split('\n') filtered_lines = [line for line in lines if not line.strip().startswith('#')] csv_text = '\n'.join(filtered_lines) csv_reader = csv.DictReader(io.StringIO(csv_text)) results = { "added": [], "updated": [], "skipped": [], "errors": [] } for row_num, row in enumerate(csv_reader, start=2): # Start at 2 to account for header try: # Validate required field unit_id = row.get('unit_id', '').strip() if not unit_id: results["errors"].append({ "row": row_num, "error": "Missing required field: unit_id" }) continue # Determine device type device_type = _get_csv_value(row, 'device_type', 'seismograph') # Check if unit exists existing_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first() if existing_unit: if not update_existing: results["skipped"].append(unit_id) continue # Update existing unit - common fields existing_unit.device_type = device_type existing_unit.unit_type = _get_csv_value(row, 'unit_type', existing_unit.unit_type or 'series3') existing_unit.deployed = _parse_bool(row.get('deployed', '')) if row.get('deployed') else existing_unit.deployed existing_unit.retired = _parse_bool(row.get('retired', '')) if row.get('retired') else existing_unit.retired existing_unit.note = _get_csv_value(row, 'note', existing_unit.note) existing_unit.project_id = _get_csv_value(row, 'project_id', existing_unit.project_id) existing_unit.location = _get_csv_value(row, 'location', existing_unit.location) existing_unit.address = _get_csv_value(row, 'address', existing_unit.address) existing_unit.coordinates = _get_csv_value(row, 'coordinates', existing_unit.coordinates) existing_unit.last_updated = datetime.utcnow() # Seismograph-specific fields if row.get('last_calibrated'): existing_unit.last_calibrated = _parse_date(row.get('last_calibrated')) if row.get('next_calibration_due'): existing_unit.next_calibration_due = _parse_date(row.get('next_calibration_due')) if row.get('deployed_with_modem_id'): existing_unit.deployed_with_modem_id = _get_csv_value(row, 'deployed_with_modem_id') # Modem-specific fields if row.get('ip_address'): existing_unit.ip_address = _get_csv_value(row, 'ip_address') if row.get('phone_number'): existing_unit.phone_number = _get_csv_value(row, 'phone_number') if row.get('hardware_model'): existing_unit.hardware_model = _get_csv_value(row, 'hardware_model') # SLM-specific fields if row.get('slm_host'): existing_unit.slm_host = _get_csv_value(row, 'slm_host') if row.get('slm_tcp_port'): existing_unit.slm_tcp_port = _parse_int(row.get('slm_tcp_port')) if row.get('slm_ftp_port'): existing_unit.slm_ftp_port = _parse_int(row.get('slm_ftp_port')) if row.get('slm_model'): existing_unit.slm_model = _get_csv_value(row, 'slm_model') if row.get('slm_serial_number'): existing_unit.slm_serial_number = _get_csv_value(row, 'slm_serial_number') if row.get('slm_frequency_weighting'): existing_unit.slm_frequency_weighting = _get_csv_value(row, 'slm_frequency_weighting') if row.get('slm_time_weighting'): existing_unit.slm_time_weighting = _get_csv_value(row, 'slm_time_weighting') if row.get('slm_measurement_range'): existing_unit.slm_measurement_range = _get_csv_value(row, 'slm_measurement_range') results["updated"].append(unit_id) else: # Create new unit with all fields new_unit = RosterUnit( id=unit_id, device_type=device_type, unit_type=_get_csv_value(row, 'unit_type', 'series3'), deployed=_parse_bool(row.get('deployed', '')), retired=_parse_bool(row.get('retired', '')), note=_get_csv_value(row, 'note', ''), project_id=_get_csv_value(row, 'project_id'), location=_get_csv_value(row, 'location'), address=_get_csv_value(row, 'address'), coordinates=_get_csv_value(row, 'coordinates'), last_updated=datetime.utcnow(), # Seismograph fields last_calibrated=_parse_date(row.get('last_calibrated', '')), next_calibration_due=_parse_date(row.get('next_calibration_due', '')), deployed_with_modem_id=_get_csv_value(row, 'deployed_with_modem_id'), # Modem fields ip_address=_get_csv_value(row, 'ip_address'), phone_number=_get_csv_value(row, 'phone_number'), hardware_model=_get_csv_value(row, 'hardware_model'), # SLM fields slm_host=_get_csv_value(row, 'slm_host'), slm_tcp_port=_parse_int(row.get('slm_tcp_port', '')), slm_ftp_port=_parse_int(row.get('slm_ftp_port', '')), slm_model=_get_csv_value(row, 'slm_model'), slm_serial_number=_get_csv_value(row, 'slm_serial_number'), slm_frequency_weighting=_get_csv_value(row, 'slm_frequency_weighting'), slm_time_weighting=_get_csv_value(row, 'slm_time_weighting'), slm_measurement_range=_get_csv_value(row, 'slm_measurement_range'), ) db.add(new_unit) results["added"].append(unit_id) except Exception as e: results["errors"].append({ "row": row_num, "unit_id": row.get('unit_id', 'unknown'), "error": str(e) }) # Commit all changes try: db.commit() except Exception as e: db.rollback() raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") return { "message": "CSV import completed", "summary": { "added": len(results["added"]), "updated": len(results["updated"]), "skipped": len(results["skipped"]), "errors": len(results["errors"]) }, "details": results } @router.post("/ignore/{unit_id}") def ignore_unit(unit_id: str, reason: str = Form(""), db: Session = Depends(get_db)): """ Add a unit to the ignore list to suppress it from unknown emitters. """ # Check if already ignored if db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first(): raise HTTPException(status_code=400, detail="Unit already ignored") ignored = IgnoredUnit( id=unit_id, reason=reason, ignored_at=datetime.utcnow() ) db.add(ignored) db.commit() return {"message": "Unit ignored", "id": unit_id} @router.delete("/ignore/{unit_id}") def unignore_unit(unit_id: str, db: Session = Depends(get_db)): """ Remove a unit from the ignore list. """ ignored = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first() if not ignored: raise HTTPException(status_code=404, detail="Unit not in ignore list") db.delete(ignored) db.commit() return {"message": "Unit unignored", "id": unit_id} @router.get("/ignored") def list_ignored_units(db: Session = Depends(get_db)): """ Get list of all ignored units. """ ignored_units = db.query(IgnoredUnit).all() return { "ignored": [ { "id": unit.id, "reason": unit.reason, "ignored_at": unit.ignored_at.isoformat() } for unit in ignored_units ] } @router.get("/history/{unit_id}") def get_unit_history(unit_id: str, db: Session = Depends(get_db)): """ Get complete history timeline for a unit. Returns all historical changes ordered by most recent first. """ history_entries = db.query(UnitHistory).filter( UnitHistory.unit_id == unit_id ).order_by(UnitHistory.changed_at.desc()).all() return { "unit_id": unit_id, "history": [ { "id": entry.id, "change_type": entry.change_type, "field_name": entry.field_name, "old_value": entry.old_value, "new_value": entry.new_value, "changed_at": entry.changed_at.isoformat(), "source": entry.source, "notes": entry.notes } for entry in history_entries ] } @router.delete("/history/{history_id}") def delete_history_entry(history_id: int, db: Session = Depends(get_db)): """ Delete a specific history entry by ID. Allows manual cleanup of old history entries. """ history_entry = db.query(UnitHistory).filter(UnitHistory.id == history_id).first() if not history_entry: raise HTTPException(status_code=404, detail="History entry not found") db.delete(history_entry) db.commit() return {"message": "History entry deleted", "id": history_id}