diff --git a/backend/main.py b/backend/main.py
index 3e0003a..e9264e8 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -1,12 +1,14 @@
-from fastapi import FastAPI, Request
+from fastapi import FastAPI, Request, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.responses import HTMLResponse
+from sqlalchemy.orm import Session
-from backend.database import engine, Base
+from backend.database import engine, Base, get_db
from backend.routers import roster, units, photos, roster_edit, dashboard, dashboard_tabs
from backend.services.snapshot import emit_status_snapshot
+from backend.models import IgnoredUnit
# Create database tables
Base.metadata.create_all(bind=engine)
@@ -41,6 +43,9 @@ app.include_router(roster_edit.router)
app.include_router(dashboard.router)
app.include_router(dashboard_tabs.router)
+from backend.routers import settings
+app.include_router(settings.router)
+
# Legacy routes from the original backend
@@ -70,14 +75,20 @@ async def unit_detail_page(request: Request, unit_id: str):
})
-@app.get("/partials/roster-table", response_class=HTMLResponse)
-async def roster_table_partial(request: Request):
- """Partial template for roster table (HTMX)"""
+@app.get("/settings", response_class=HTMLResponse)
+async def settings_page(request: Request):
+ """Settings page for roster management"""
+ return templates.TemplateResponse("settings.html", {"request": request})
+
+
+@app.get("/partials/roster-deployed", response_class=HTMLResponse)
+async def roster_deployed_partial(request: Request):
+ """Partial template for deployed units tab"""
from datetime import datetime
snapshot = emit_status_snapshot()
units_list = []
- for unit_id, unit_data in snapshot["units"].items():
+ for unit_id, unit_data in snapshot["active"].items():
units_list.append({
"id": unit_id,
"status": unit_data["status"],
@@ -85,6 +96,13 @@ async def roster_table_partial(request: Request):
"last_seen": unit_data["last"],
"deployed": unit_data["deployed"],
"note": unit_data.get("note", ""),
+ "device_type": unit_data.get("device_type", "seismograph"),
+ "last_calibrated": unit_data.get("last_calibrated"),
+ "next_calibration_due": unit_data.get("next_calibration_due"),
+ "deployed_with_modem_id": unit_data.get("deployed_with_modem_id"),
+ "ip_address": unit_data.get("ip_address"),
+ "phone_number": unit_data.get("phone_number"),
+ "hardware_model": unit_data.get("hardware_model"),
})
# Sort by status priority (Missing > Pending > OK) then by ID
@@ -98,6 +116,98 @@ async def roster_table_partial(request: Request):
})
+@app.get("/partials/roster-benched", response_class=HTMLResponse)
+async def roster_benched_partial(request: Request):
+ """Partial template for benched units tab"""
+ from datetime import datetime
+ snapshot = emit_status_snapshot()
+
+ units_list = []
+ for unit_id, unit_data in snapshot["benched"].items():
+ units_list.append({
+ "id": unit_id,
+ "status": unit_data["status"],
+ "age": unit_data["age"],
+ "last_seen": unit_data["last"],
+ "deployed": unit_data["deployed"],
+ "note": unit_data.get("note", ""),
+ "device_type": unit_data.get("device_type", "seismograph"),
+ "last_calibrated": unit_data.get("last_calibrated"),
+ "next_calibration_due": unit_data.get("next_calibration_due"),
+ "deployed_with_modem_id": unit_data.get("deployed_with_modem_id"),
+ "ip_address": unit_data.get("ip_address"),
+ "phone_number": unit_data.get("phone_number"),
+ "hardware_model": unit_data.get("hardware_model"),
+ })
+
+ # Sort by ID
+ units_list.sort(key=lambda x: x["id"])
+
+ return templates.TemplateResponse("partials/roster_table.html", {
+ "request": request,
+ "units": units_list,
+ "timestamp": datetime.now().strftime("%H:%M:%S")
+ })
+
+
+@app.get("/partials/roster-retired", response_class=HTMLResponse)
+async def roster_retired_partial(request: Request):
+ """Partial template for retired units tab"""
+ from datetime import datetime
+ snapshot = emit_status_snapshot()
+
+ units_list = []
+ for unit_id, unit_data in snapshot["retired"].items():
+ units_list.append({
+ "id": unit_id,
+ "status": unit_data["status"],
+ "age": unit_data["age"],
+ "last_seen": unit_data["last"],
+ "deployed": unit_data["deployed"],
+ "note": unit_data.get("note", ""),
+ "device_type": unit_data.get("device_type", "seismograph"),
+ "last_calibrated": unit_data.get("last_calibrated"),
+ "next_calibration_due": unit_data.get("next_calibration_due"),
+ "deployed_with_modem_id": unit_data.get("deployed_with_modem_id"),
+ "ip_address": unit_data.get("ip_address"),
+ "phone_number": unit_data.get("phone_number"),
+ "hardware_model": unit_data.get("hardware_model"),
+ })
+
+ # Sort by ID
+ units_list.sort(key=lambda x: x["id"])
+
+ return templates.TemplateResponse("partials/retired_table.html", {
+ "request": request,
+ "units": units_list,
+ "timestamp": datetime.now().strftime("%H:%M:%S")
+ })
+
+
+@app.get("/partials/roster-ignored", response_class=HTMLResponse)
+async def roster_ignored_partial(request: Request, db: Session = Depends(get_db)):
+ """Partial template for ignored units tab"""
+ from datetime import datetime
+
+ ignored = db.query(IgnoredUnit).all()
+ ignored_list = []
+ for unit in ignored:
+ ignored_list.append({
+ "id": unit.id,
+ "reason": unit.reason or "",
+ "ignored_at": unit.ignored_at.strftime("%Y-%m-%d %H:%M:%S") if unit.ignored_at else "Unknown"
+ })
+
+ # Sort by ID
+ ignored_list.sort(key=lambda x: x["id"])
+
+ return templates.TemplateResponse("partials/ignored_table.html", {
+ "request": request,
+ "ignored_units": ignored_list,
+ "timestamp": datetime.now().strftime("%H:%M:%S")
+ })
+
+
@app.get("/partials/unknown-emitters", response_class=HTMLResponse)
async def unknown_emitters_partial(request: Request):
"""Partial template for unknown emitters (HTMX)"""
diff --git a/backend/migrate_add_device_types.py b/backend/migrate_add_device_types.py
new file mode 100644
index 0000000..f923f34
--- /dev/null
+++ b/backend/migrate_add_device_types.py
@@ -0,0 +1,84 @@
+"""
+Migration script to add device type support to the roster table.
+
+This adds columns for:
+- device_type (seismograph/modem discriminator)
+- Seismograph-specific fields (calibration dates, modem pairing)
+- Modem-specific fields (IP address, phone number, hardware model)
+
+Run this script once to migrate an existing database.
+"""
+
+import sqlite3
+import os
+
+# Database path
+DB_PATH = "./data/seismo_fleet.db"
+
+def migrate_database():
+ """Add new columns to the roster table"""
+
+ if not os.path.exists(DB_PATH):
+ print(f"Database not found at {DB_PATH}")
+ print("The database will be created automatically when you run the application.")
+ return
+
+ print(f"Migrating database: {DB_PATH}")
+
+ conn = sqlite3.connect(DB_PATH)
+ cursor = conn.cursor()
+
+ # Check if device_type column already exists
+ cursor.execute("PRAGMA table_info(roster)")
+ columns = [col[1] for col in cursor.fetchall()]
+
+ if "device_type" in columns:
+ print("Migration already applied - device_type column exists")
+ conn.close()
+ return
+
+ print("Adding new columns to roster table...")
+
+ try:
+ # Add device type discriminator
+ cursor.execute("ALTER TABLE roster ADD COLUMN device_type TEXT DEFAULT 'seismograph'")
+ print(" ✓ Added device_type column")
+
+ # Add seismograph-specific fields
+ cursor.execute("ALTER TABLE roster ADD COLUMN last_calibrated DATE")
+ print(" ✓ Added last_calibrated column")
+
+ cursor.execute("ALTER TABLE roster ADD COLUMN next_calibration_due DATE")
+ print(" ✓ Added next_calibration_due column")
+
+ cursor.execute("ALTER TABLE roster ADD COLUMN deployed_with_modem_id TEXT")
+ print(" ✓ Added deployed_with_modem_id column")
+
+ # Add modem-specific fields
+ cursor.execute("ALTER TABLE roster ADD COLUMN ip_address TEXT")
+ print(" ✓ Added ip_address column")
+
+ cursor.execute("ALTER TABLE roster ADD COLUMN phone_number TEXT")
+ print(" ✓ Added phone_number column")
+
+ cursor.execute("ALTER TABLE roster ADD COLUMN hardware_model TEXT")
+ print(" ✓ Added hardware_model column")
+
+ # Set all existing units to seismograph type
+ cursor.execute("UPDATE roster SET device_type = 'seismograph' WHERE device_type IS NULL")
+ print(" ✓ Set existing units to seismograph type")
+
+ conn.commit()
+ print("\nMigration completed successfully!")
+
+ except sqlite3.Error as e:
+ print(f"\nError during migration: {e}")
+ conn.rollback()
+ raise
+
+ finally:
+ conn.close()
+
+
+if __name__ == "__main__":
+ migrate_database()
diff --git a/backend/models.py b/backend/models.py
index 6f1f9d2..d941264 100644
--- a/backend/models.py
+++ b/backend/models.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Column, String, DateTime, Boolean, Text
+from sqlalchemy import Column, String, DateTime, Boolean, Text, Date
from datetime import datetime
from backend.database import Base
@@ -18,18 +18,34 @@ class RosterUnit(Base):
"""
Roster table: represents our *intended assignment* of a unit.
This is editable from the GUI.
+
+ Supports multiple device types (seismograph, modem) with type-specific fields.
"""
__tablename__ = "roster"
+ # Core fields (all device types)
id = Column(String, primary_key=True, index=True)
- unit_type = Column(String, default="series3")
+ unit_type = Column(String, default="series3") # Backward compatibility
+ device_type = Column(String, default="seismograph") # "seismograph" | "modem"
deployed = Column(Boolean, default=True)
retired = Column(Boolean, default=False)
note = Column(String, nullable=True)
project_id = Column(String, nullable=True)
- location = Column(String, nullable=True)
+ location = Column(String, nullable=True) # Legacy field - use address/coordinates instead
+ address = Column(String, nullable=True) # Human-readable address
+ coordinates = Column(String, nullable=True) # Lat,Lon format: "34.0522,-118.2437"
last_updated = Column(DateTime, default=datetime.utcnow)
+ # Seismograph-specific fields (nullable for modems)
+ last_calibrated = Column(Date, nullable=True)
+ next_calibration_due = Column(Date, nullable=True)
+ deployed_with_modem_id = Column(String, nullable=True) # FK to another RosterUnit
+
+ # Modem-specific fields (nullable for seismographs)
+ ip_address = Column(String, nullable=True)
+ phone_number = Column(String, nullable=True)
+ hardware_model = Column(String, nullable=True)
+
class IgnoredUnit(Base):
"""
diff --git a/backend/routers/roster_edit.py b/backend/routers/roster_edit.py
index 1922942..82f2061 100644
--- a/backend/routers/roster_edit.py
+++ b/backend/routers/roster_edit.py
@@ -1,11 +1,11 @@
from fastapi import APIRouter, Depends, HTTPException, Form, UploadFile, File
from sqlalchemy.orm import Session
-from datetime import datetime
+from datetime import datetime, date
import csv
import io
from backend.database import get_db
-from backend.models import RosterUnit, IgnoredUnit
+from backend.models import RosterUnit, IgnoredUnit, Emitter
router = APIRouter(prefix="/api/roster", tags=["roster-edit"])
@@ -23,28 +23,161 @@ def get_or_create_roster_unit(db: Session, unit_id: str):
@router.post("/add")
def add_roster_unit(
id: str = Form(...),
+ device_type: str = Form("seismograph"),
unit_type: str = Form("series3"),
deployed: bool = Form(False),
+ retired: bool = Form(False),
note: str = Form(""),
project_id: str = Form(None),
location: str = Form(None),
+ address: str = Form(None),
+ coordinates: str = Form(None),
+ # Seismograph-specific fields
+ last_calibrated: str = Form(None),
+ next_calibration_due: str = Form(None),
+ deployed_with_modem_id: str = Form(None),
+ # Modem-specific fields
+ ip_address: str = Form(None),
+ phone_number: str = Form(None),
+ hardware_model: str = Form(None),
db: Session = Depends(get_db)
):
if db.query(RosterUnit).filter(RosterUnit.id == id).first():
raise HTTPException(status_code=400, detail="Unit already exists")
+ # Parse date fields if provided
+ last_cal_date = None
+ if last_calibrated:
+ try:
+ last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date()
+ except ValueError:
+ raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD")
+
+ next_cal_date = None
+ if next_calibration_due:
+ try:
+ next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date()
+ except ValueError:
+ raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD")
+
unit = RosterUnit(
id=id,
+ device_type=device_type,
unit_type=unit_type,
deployed=deployed,
+ retired=retired,
note=note,
project_id=project_id,
location=location,
+ address=address,
+ coordinates=coordinates,
last_updated=datetime.utcnow(),
+ # Seismograph-specific fields
+ last_calibrated=last_cal_date,
+ next_calibration_due=next_cal_date,
+ deployed_with_modem_id=deployed_with_modem_id if deployed_with_modem_id else None,
+ # Modem-specific fields
+ ip_address=ip_address if ip_address else None,
+ phone_number=phone_number if phone_number else None,
+ hardware_model=hardware_model if hardware_model else None,
)
db.add(unit)
db.commit()
- return {"message": "Unit added", "id": id}
+ return {"message": "Unit added", "id": id, "device_type": device_type}
+
+
+@router.get("/{unit_id}")
+def get_roster_unit(unit_id: str, db: Session = Depends(get_db)):
+ """Get a single roster unit by ID"""
+ unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
+ if not unit:
+ raise HTTPException(status_code=404, detail="Unit not found")
+
+ return {
+ "id": unit.id,
+ "device_type": unit.device_type or "seismograph",
+ "unit_type": unit.unit_type,
+ "deployed": unit.deployed,
+ "retired": unit.retired,
+ "note": unit.note or "",
+ "project_id": unit.project_id or "",
+ "location": unit.location or "",
+ "address": unit.address or "",
+ "coordinates": unit.coordinates or "",
+ "last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else "",
+ "next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else "",
+ "deployed_with_modem_id": unit.deployed_with_modem_id or "",
+ "ip_address": unit.ip_address or "",
+ "phone_number": unit.phone_number or "",
+ "hardware_model": unit.hardware_model or "",
+ }
+
+
+@router.post("/edit/{unit_id}")
+def edit_roster_unit(
+ unit_id: str,
+ device_type: str = Form("seismograph"),
+ unit_type: str = Form("series3"),
+ deployed: bool = Form(False),
+ retired: bool = Form(False),
+ note: str = Form(""),
+ project_id: str = Form(None),
+ location: str = Form(None),
+ address: str = Form(None),
+ coordinates: str = Form(None),
+ # Seismograph-specific fields
+ last_calibrated: str = Form(None),
+ next_calibration_due: str = Form(None),
+ deployed_with_modem_id: str = Form(None),
+ # Modem-specific fields
+ ip_address: str = Form(None),
+ phone_number: str = Form(None),
+ hardware_model: str = Form(None),
+ db: Session = Depends(get_db)
+):
+ unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
+ if not unit:
+ raise HTTPException(status_code=404, detail="Unit not found")
+
+ # Parse date fields if provided
+ last_cal_date = None
+ if last_calibrated:
+ try:
+ last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date()
+ except ValueError:
+ raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD")
+
+ next_cal_date = None
+ if next_calibration_due:
+ try:
+ next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date()
+ except ValueError:
+ raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD")
+
+ # Update all fields
+ unit.device_type = device_type
+ unit.unit_type = unit_type
+ unit.deployed = deployed
+ unit.retired = retired
+ unit.note = note
+ unit.project_id = project_id
+ unit.location = location
+ unit.address = address
+ unit.coordinates = coordinates
+ unit.last_updated = datetime.utcnow()
+
+ # Seismograph-specific fields
+ unit.last_calibrated = last_cal_date
+ unit.next_calibration_due = next_cal_date
+ unit.deployed_with_modem_id = deployed_with_modem_id if deployed_with_modem_id else None
+
+ # Modem-specific fields
+ unit.ip_address = ip_address if ip_address else None
+ unit.phone_number = phone_number if phone_number else None
+ unit.hardware_model = hardware_model if hardware_model else None
+
+ db.commit()
+ return {"message": "Unit updated", "id": unit_id, "device_type": device_type}
@router.post("/set-deployed/{unit_id}")
@@ -65,6 +198,34 @@ def set_retired(unit_id: str, retired: bool = Form(...), db: Session = Depends(g
return {"message": "Updated", "id": unit_id, "retired": retired}
+@router.delete("/{unit_id}")
+def delete_roster_unit(unit_id: str, db: Session = Depends(get_db)):
+ """
+ Permanently delete a unit from the database.
+ Checks both roster and emitters tables and deletes from any table where the unit exists.
+ """
+ deleted = False
+
+ # Try to delete from roster table
+ roster_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
+ if roster_unit:
+ db.delete(roster_unit)
+ deleted = True
+
+ # Try to delete from emitters table
+ emitter = db.query(Emitter).filter(Emitter.id == unit_id).first()
+ if emitter:
+ db.delete(emitter)
+ deleted = True
+
+ # If not found in either table, return error
+ if not deleted:
+ raise HTTPException(status_code=404, detail="Unit not found")
+
+ db.commit()
+ return {"message": "Unit deleted", "id": unit_id}
+
+
@router.post("/set-note/{unit_id}")
def set_note(unit_id: str, note: str = Form(""), db: Session = Depends(get_db)):
unit = get_or_create_roster_unit(db, unit_id)
@@ -138,6 +299,8 @@ async def import_csv(
existing_unit.note = row.get('note', existing_unit.note or '')
existing_unit.project_id = row.get('project_id', existing_unit.project_id)
existing_unit.location = row.get('location', existing_unit.location)
+ existing_unit.address = row.get('address', existing_unit.address)
+ existing_unit.coordinates = row.get('coordinates', existing_unit.coordinates)
existing_unit.last_updated = datetime.utcnow()
results["updated"].append(unit_id)
@@ -151,6 +314,8 @@ async def import_csv(
note=row.get('note', ''),
project_id=row.get('project_id'),
location=row.get('location'),
+ address=row.get('address'),
+ coordinates=row.get('coordinates'),
last_updated=datetime.utcnow()
)
db.add(new_unit)
diff --git a/backend/routers/settings.py b/backend/routers/settings.py
new file mode 100644
index 0000000..7063209
--- /dev/null
+++ b/backend/routers/settings.py
@@ -0,0 +1,241 @@
+from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
+from fastapi.responses import StreamingResponse
+from sqlalchemy.orm import Session
+from datetime import datetime, date
+import csv
+import io
+
+from backend.database import get_db
+from backend.models import RosterUnit, Emitter, IgnoredUnit
+
+router = APIRouter(prefix="/api/settings", tags=["settings"])
+
+
+@router.get("/export-csv")
+def export_roster_csv(db: Session = Depends(get_db)):
+ """Export all roster units to CSV"""
+ units = db.query(RosterUnit).all()
+
+ # Create CSV in memory
+ output = io.StringIO()
+ fieldnames = [
+ 'unit_id', 'unit_type', 'device_type', 'deployed', 'retired',
+ 'note', 'project_id', 'location', 'address', 'coordinates',
+ 'last_calibrated', 'next_calibration_due', 'deployed_with_modem_id',
+ 'ip_address', 'phone_number', 'hardware_model'
+ ]
+
+ writer = csv.DictWriter(output, fieldnames=fieldnames)
+ writer.writeheader()
+
+ for unit in units:
+ writer.writerow({
+ 'unit_id': unit.id,
+ 'unit_type': unit.unit_type or '',
+ 'device_type': unit.device_type or 'seismograph',
+ 'deployed': 'true' if unit.deployed else 'false',
+ 'retired': 'true' if unit.retired else 'false',
+ 'note': unit.note or '',
+ 'project_id': unit.project_id or '',
+ 'location': unit.location or '',
+ 'address': unit.address or '',
+ 'coordinates': unit.coordinates or '',
+ 'last_calibrated': unit.last_calibrated.strftime('%Y-%m-%d') if unit.last_calibrated else '',
+ 'next_calibration_due': unit.next_calibration_due.strftime('%Y-%m-%d') if unit.next_calibration_due else '',
+ 'deployed_with_modem_id': unit.deployed_with_modem_id or '',
+ 'ip_address': unit.ip_address or '',
+ 'phone_number': unit.phone_number or '',
+ 'hardware_model': unit.hardware_model or ''
+ })
+
+ output.seek(0)
+ filename = f"roster_export_{date.today().isoformat()}.csv"
+
+ return StreamingResponse(
+ io.BytesIO(output.getvalue().encode('utf-8')),
+ media_type="text/csv",
+        headers={"Content-Disposition": f"attachment; filename={filename}"}
+ )
+
+
+@router.get("/stats")
+def get_table_stats(db: Session = Depends(get_db)):
+ """Get counts for all tables"""
+ roster_count = db.query(RosterUnit).count()
+ emitters_count = db.query(Emitter).count()
+ ignored_count = db.query(IgnoredUnit).count()
+
+ return {
+ "roster": roster_count,
+ "emitters": emitters_count,
+ "ignored": ignored_count,
+ "total": roster_count + emitters_count + ignored_count
+ }
+
+
+@router.get("/roster-units")
+def get_all_roster_units(db: Session = Depends(get_db)):
+ """Get all roster units for management table"""
+ units = db.query(RosterUnit).order_by(RosterUnit.id).all()
+
+ return [{
+ "id": unit.id,
+ "device_type": unit.device_type or "seismograph",
+ "unit_type": unit.unit_type or "series3",
+ "deployed": unit.deployed,
+ "retired": unit.retired,
+ "note": unit.note or "",
+ "project_id": unit.project_id or "",
+ "location": unit.location or "",
+ "address": unit.address or "",
+ "coordinates": unit.coordinates or "",
+ "last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else None,
+ "next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else None,
+ "deployed_with_modem_id": unit.deployed_with_modem_id or "",
+ "ip_address": unit.ip_address or "",
+ "phone_number": unit.phone_number or "",
+ "hardware_model": unit.hardware_model or "",
+ "last_updated": unit.last_updated.isoformat() if unit.last_updated else None
+ } for unit in units]
+
+
+def parse_date(date_str):
+ """Helper function to parse date strings"""
+ if not date_str or not date_str.strip():
+ return None
+ try:
+ return datetime.strptime(date_str.strip(), "%Y-%m-%d").date()
+ except ValueError:
+ return None
+
+
+@router.post("/import-csv-replace")
+async def import_csv_replace(
+ file: UploadFile = File(...),
+ db: Session = Depends(get_db)
+):
+ """
+ Replace all roster data with CSV import (atomic transaction).
+ Clears roster table first, then imports all rows from CSV.
+ """
+
+ if not file.filename.endswith('.csv'):
+ raise HTTPException(status_code=400, detail="File must be a CSV")
+
+ # Read and parse CSV
+ contents = await file.read()
+ csv_text = contents.decode('utf-8')
+ csv_reader = csv.DictReader(io.StringIO(csv_text))
+
+ # Parse all rows FIRST (fail fast before deletion)
+ parsed_units = []
+ for row_num, row in enumerate(csv_reader, start=2):
+ unit_id = row.get('unit_id', '').strip()
+ if not unit_id:
+ raise HTTPException(
+ status_code=400,
+ detail=f"Row {row_num}: Missing required field unit_id"
+ )
+
+ # Parse and validate dates
+ last_cal_date = parse_date(row.get('last_calibrated'))
+ next_cal_date = parse_date(row.get('next_calibration_due'))
+
+ parsed_units.append({
+ 'id': unit_id,
+ 'unit_type': row.get('unit_type', 'series3'),
+ 'device_type': row.get('device_type', 'seismograph'),
+ 'deployed': row.get('deployed', '').lower() in ('true', '1', 'yes'),
+ 'retired': row.get('retired', '').lower() in ('true', '1', 'yes'),
+ 'note': row.get('note', ''),
+ 'project_id': row.get('project_id') or None,
+ 'location': row.get('location') or None,
+ 'address': row.get('address') or None,
+ 'coordinates': row.get('coordinates') or None,
+ 'last_calibrated': last_cal_date,
+ 'next_calibration_due': next_cal_date,
+ 'deployed_with_modem_id': row.get('deployed_with_modem_id') or None,
+ 'ip_address': row.get('ip_address') or None,
+ 'phone_number': row.get('phone_number') or None,
+ 'hardware_model': row.get('hardware_model') or None,
+ })
+
+ # Atomic transaction: delete all, then insert all
+ try:
+ deleted_count = db.query(RosterUnit).delete()
+
+ for unit_data in parsed_units:
+ new_unit = RosterUnit(**unit_data, last_updated=datetime.utcnow())
+ db.add(new_unit)
+
+ db.commit()
+
+ return {
+ "message": "Roster replaced successfully",
+ "deleted": deleted_count,
+ "added": len(parsed_units)
+ }
+
+ except Exception as e:
+ db.rollback()
+ raise HTTPException(status_code=500, detail=f"Import failed: {str(e)}")
+
+
+@router.post("/clear-all")
+def clear_all_data(db: Session = Depends(get_db)):
+ """Clear all tables (roster, emitters, ignored)"""
+ try:
+ roster_count = db.query(RosterUnit).delete()
+ emitters_count = db.query(Emitter).delete()
+ ignored_count = db.query(IgnoredUnit).delete()
+
+ db.commit()
+
+ return {
+ "message": "All data cleared",
+ "deleted": {
+ "roster": roster_count,
+ "emitters": emitters_count,
+ "ignored": ignored_count,
+ "total": roster_count + emitters_count + ignored_count
+ }
+ }
+ except Exception as e:
+ db.rollback()
+ raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
+
+
+@router.post("/clear-roster")
+def clear_roster(db: Session = Depends(get_db)):
+ """Clear roster table only"""
+ try:
+ count = db.query(RosterUnit).delete()
+ db.commit()
+ return {"message": "Roster cleared", "deleted": count}
+ except Exception as e:
+ db.rollback()
+ raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
+
+
+@router.post("/clear-emitters")
+def clear_emitters(db: Session = Depends(get_db)):
+ """Clear emitters table only"""
+ try:
+ count = db.query(Emitter).delete()
+ db.commit()
+ return {"message": "Emitters cleared", "deleted": count}
+ except Exception as e:
+ db.rollback()
+ raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
+
+
+@router.post("/clear-ignored")
+def clear_ignored(db: Session = Depends(get_db)):
+ """Clear ignored units table only"""
+ try:
+ count = db.query(IgnoredUnit).delete()
+ db.commit()
+ return {"message": "Ignored units cleared", "deleted": count}
+ except Exception as e:
+ db.rollback()
+ raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
diff --git a/backend/services/snapshot.py b/backend/services/snapshot.py
index f71b2f5..478d987 100644
--- a/backend/services/snapshot.py
+++ b/backend/services/snapshot.py
@@ -69,6 +69,18 @@ def emit_status_snapshot():
"deployed": r.deployed,
"note": r.note or "",
"retired": r.retired,
+ # Device type and type-specific fields
+ "device_type": r.device_type or "seismograph",
+ "last_calibrated": r.last_calibrated.isoformat() if r.last_calibrated else None,
+ "next_calibration_due": r.next_calibration_due.isoformat() if r.next_calibration_due else None,
+ "deployed_with_modem_id": r.deployed_with_modem_id,
+ "ip_address": r.ip_address,
+ "phone_number": r.phone_number,
+ "hardware_model": r.hardware_model,
+ # Location for mapping
+ "location": r.location or "",
+ "address": r.address or "",
+ "coordinates": r.coordinates or "",
}
# --- Add unexpected emitter-only units ---
@@ -84,6 +96,18 @@ def emit_status_snapshot():
"deployed": False, # default
"note": "",
"retired": False,
+ # Device type and type-specific fields (defaults for unknown units)
+ "device_type": "seismograph", # default
+ "last_calibrated": None,
+ "next_calibration_due": None,
+ "deployed_with_modem_id": None,
+ "ip_address": None,
+ "phone_number": None,
+ "hardware_model": None,
+ # Location fields
+ "location": "",
+ "address": "",
+ "coordinates": "",
}
# Separate buckets for UI
@@ -121,9 +145,10 @@ def emit_status_snapshot():
"benched": len(benched_units),
"retired": len(retired_units),
"unknown": len(unknown_units),
- "ok": sum(1 for u in units.values() if u["status"] == "OK"),
- "pending": sum(1 for u in units.values() if u["status"] == "Pending"),
- "missing": sum(1 for u in units.values() if u["status"] == "Missing"),
+ # Status counts only for deployed units (active_units)
+ "ok": sum(1 for u in active_units.values() if u["status"] == "OK"),
+ "pending": sum(1 for u in active_units.values() if u["status"] == "Pending"),
+ "missing": sum(1 for u in active_units.values() if u["status"] == "Missing"),
}
}
finally:
diff --git a/create_test_db.py b/create_test_db.py
new file mode 100644
index 0000000..11b44fc
--- /dev/null
+++ b/create_test_db.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python3
+"""
+Create a fresh test database with the new schema and some sample data.
+"""
+
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from datetime import datetime, date, timedelta
+from backend.models import Base, RosterUnit, Emitter
+
+# Create a new test database
+TEST_DB_PATH = "/tmp/sfm_test.db"
+engine = create_engine(f"sqlite:///{TEST_DB_PATH}", connect_args={"check_same_thread": False})
+
+# Drop all tables and recreate them with the new schema
+Base.metadata.drop_all(bind=engine)
+Base.metadata.create_all(bind=engine)
+
+# Create a session
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+db = SessionLocal()
+
+try:
+ # Add some test seismographs
+ seismo1 = RosterUnit(
+ id="BE9449",
+ device_type="seismograph",
+ unit_type="series3",
+ deployed=True,
+ note="Primary field unit",
+ project_id="PROJ-001",
+ location="Site A",
+ last_calibrated=date(2024, 1, 15),
+ next_calibration_due=date(2025, 1, 15),
+ deployed_with_modem_id="MDM001",
+ last_updated=datetime.utcnow(),
+ )
+
+ seismo2 = RosterUnit(
+ id="BE9450",
+ device_type="seismograph",
+ unit_type="series3",
+ deployed=False,
+ note="Benched for maintenance",
+ project_id="PROJ-001",
+ location="Warehouse",
+ last_calibrated=date(2023, 6, 20),
+ next_calibration_due=date(2024, 6, 20), # Past due
+ last_updated=datetime.utcnow(),
+ )
+
+ # Add some test modems
+ modem1 = RosterUnit(
+ id="MDM001",
+ device_type="modem",
+ unit_type="modem",
+ deployed=True,
+ note="Paired with BE9449",
+ project_id="PROJ-001",
+ location="Site A",
+ ip_address="192.168.1.100",
+ phone_number="+1-555-0123",
+ hardware_model="Raven XTV",
+ last_updated=datetime.utcnow(),
+ )
+
+ modem2 = RosterUnit(
+ id="MDM002",
+ device_type="modem",
+ unit_type="modem",
+ deployed=False,
+ note="Spare modem",
+ project_id="PROJ-001",
+ location="Warehouse",
+ ip_address="192.168.1.101",
+ phone_number="+1-555-0124",
+ hardware_model="Raven XT",
+ last_updated=datetime.utcnow(),
+ )
+
+ # Add test emitters (status reports)
+ emitter1 = Emitter(
+ id="BE9449",
+ unit_type="series3",
+ last_seen=datetime.utcnow() - timedelta(hours=2),
+ last_file="BE9449.2024.336.12.00.mseed",
+ status="OK",
+ notes="Running normally",
+ )
+
+ emitter2 = Emitter(
+ id="BE9450",
+ unit_type="series3",
+ last_seen=datetime.utcnow() - timedelta(days=30),
+ last_file="BE9450.2024.306.08.00.mseed",
+ status="Missing",
+ notes="No data received",
+ )
+
+ # Add all units
+ db.add_all([seismo1, seismo2, modem1, modem2, emitter1, emitter2])
+ db.commit()
+
+ print(f"✓ Test database created at {TEST_DB_PATH}")
+ print(f"✓ Added 2 seismographs (BE9449, BE9450)")
+ print(f"✓ Added 2 modems (MDM001, MDM002)")
+ print(f"✓ Added 2 emitter status reports")
+ print(f"\nDatabase is ready for testing!")
+
+except Exception as e:
+ print(f"Error creating test database: {e}")
+ db.rollback()
+ raise
+finally:
+ db.close()
diff --git a/templates/base.html b/templates/base.html
index eb72c35..e2d496c 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -68,7 +68,7 @@
Seismo
Fleet Manager
-
v0.1.1
+v0.2.1
@@ -94,7 +94,7 @@ Projects - +