chore: modular monolith folder split (no behavior change)
0
app/seismo/routers/__init__.py
Normal file
146
app/seismo/routers/activity.py
Normal file
@@ -0,0 +1,146 @@
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from sqlalchemy import desc
from pathlib import Path
from datetime import datetime, timedelta, timezone
from typing import Optional

from backend.database import get_db
from backend.models import UnitHistory, Emitter, RosterUnit

router = APIRouter(prefix="/api", tags=["activity"])

PHOTOS_BASE_DIR = Path("data/photos")


@router.get("/recent-activity")
def get_recent_activity(limit: int = 20, db: Session = Depends(get_db)):
    """
    Get recent activity feed combining unit history changes and photo uploads.
    Returns a unified timeline of events sorted by timestamp (newest first).
    """
    activities = []

    # Get recent history entries
    history_entries = db.query(UnitHistory)\
        .order_by(desc(UnitHistory.changed_at))\
        .limit(limit * 2)\
        .all()  # Get more than needed to mix with photos

    for entry in history_entries:
        activity = {
            "type": "history",
            "timestamp": entry.changed_at.isoformat(),
            "timestamp_unix": entry.changed_at.timestamp(),
            "unit_id": entry.unit_id,
            "change_type": entry.change_type,
            "field_name": entry.field_name,
            "old_value": entry.old_value,
            "new_value": entry.new_value,
            "source": entry.source,
            "notes": entry.notes
        }
        activities.append(activity)

    # Get recent photos
    if PHOTOS_BASE_DIR.exists():
        image_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp"}
        photo_activities = []

        for unit_dir in PHOTOS_BASE_DIR.iterdir():
            if not unit_dir.is_dir():
                continue

            unit_id = unit_dir.name

            for file_path in unit_dir.iterdir():
                if file_path.is_file() and file_path.suffix.lower() in image_extensions:
                    modified_time = file_path.stat().st_mtime
                    photo_activities.append({
                        "type": "photo",
                        "timestamp": datetime.fromtimestamp(modified_time).isoformat(),
                        "timestamp_unix": modified_time,
                        "unit_id": unit_id,
                        "filename": file_path.name,
                        "photo_url": f"/api/unit/{unit_id}/photo/{file_path.name}"
                    })

        activities.extend(photo_activities)

    # Sort all activities by timestamp (newest first)
    activities.sort(key=lambda x: x["timestamp_unix"], reverse=True)

    # Limit to requested number
    activities = activities[:limit]

    return {
        "activities": activities,
        "total": len(activities)
    }


@router.get("/recent-callins")
def get_recent_callins(hours: int = 6, limit: Optional[int] = None, db: Session = Depends(get_db)):
    """
    Get recent unit call-ins (units that have reported recently).
    Returns units sorted by most recent last_seen timestamp.

    Args:
        hours: Look back this many hours (default: 6)
        limit: Maximum number of results (default: None = all)
    """
    # Calculate the time threshold
    time_threshold = datetime.now(timezone.utc) - timedelta(hours=hours)

    # Query emitters with recent activity, joined with roster info
    recent_emitters = db.query(Emitter)\
        .filter(Emitter.last_seen >= time_threshold)\
        .order_by(desc(Emitter.last_seen))\
        .all()

    # Get roster info for all units
    roster_dict = {r.id: r for r in db.query(RosterUnit).all()}

    call_ins = []
    for emitter in recent_emitters:
        roster_unit = roster_dict.get(emitter.id)

        # Calculate time since last seen
        last_seen_utc = emitter.last_seen.replace(tzinfo=timezone.utc) if emitter.last_seen.tzinfo is None else emitter.last_seen
        time_diff = datetime.now(timezone.utc) - last_seen_utc

        # Format time ago
        if time_diff.total_seconds() < 60:
            time_ago = "just now"
        elif time_diff.total_seconds() < 3600:
            minutes = int(time_diff.total_seconds() / 60)
            time_ago = f"{minutes}m ago"
        else:
            hours_ago = time_diff.total_seconds() / 3600
            if hours_ago < 24:
                time_ago = f"{int(hours_ago)}h {int((hours_ago % 1) * 60)}m ago"
            else:
                days = int(hours_ago / 24)
                time_ago = f"{days}d ago"

        call_in = {
            "unit_id": emitter.id,
            "last_seen": emitter.last_seen.isoformat(),
            "time_ago": time_ago,
            "status": emitter.status,
            "device_type": roster_unit.device_type if roster_unit else "seismograph",
            "deployed": roster_unit.deployed if roster_unit else False,
            "note": roster_unit.note if roster_unit and roster_unit.note else "",
            "location": roster_unit.address if roster_unit and roster_unit.address else (roster_unit.location if roster_unit else "")
        }
        call_ins.append(call_in)

    # Apply limit if specified
    if limit:
        call_ins = call_ins[:limit]

    return {
        "call_ins": call_ins,
        "total": len(call_ins),
        "hours": hours,
        "time_threshold": time_threshold.isoformat()
    }
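
Usage sketch for the two endpoints above, assuming the service is reachable at http://localhost:8000 (hypothetical base URL, not part of this commit):

    import httpx

    BASE = "http://localhost:8000"  # hypothetical; adjust to the deployment

    # Unified feed: history changes and photo uploads, newest first
    feed = httpx.get(f"{BASE}/api/recent-activity", params={"limit": 10}).json()
    for item in feed["activities"]:
        print(item["type"], item["unit_id"], item["timestamp"])

    # Units that have called in over the last 12 hours
    callins = httpx.get(f"{BASE}/api/recent-callins", params={"hours": 12}).json()
    print(callins["total"], "call-ins since", callins["time_threshold"])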
25
app/seismo/routers/dashboard.py
Normal file
@@ -0,0 +1,25 @@
from fastapi import APIRouter, Request, Depends
from fastapi.templating import Jinja2Templates

from backend.services.snapshot import emit_status_snapshot

router = APIRouter()
templates = Jinja2Templates(directory="templates")


@router.get("/dashboard/active")
def dashboard_active(request: Request):
    snapshot = emit_status_snapshot()
    return templates.TemplateResponse(
        "partials/active_table.html",
        {"request": request, "units": snapshot["active"]}
    )


@router.get("/dashboard/benched")
def dashboard_benched(request: Request):
    snapshot = emit_status_snapshot()
    return templates.TemplateResponse(
        "partials/benched_table.html",
        {"request": request, "units": snapshot["benched"]}
    )
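
For orientation, a minimal sketch of how these routers could be wired up after the folder split; the app-factory shape and the import path are assumptions, not part of this commit:

    # Hypothetical app factory; the module path app.seismo.routers is assumed.
    from fastapi import FastAPI

    from app.seismo.routers import (
        activity, dashboard, dashboard_tabs, photos,
        roster, roster_edit, seismo_dashboard, settings,
    )


    def create_app() -> FastAPI:
        app = FastAPI(title="Terra-View")
        # Each router module exposes a module-level `router` object.
        for module in (activity, dashboard, dashboard_tabs, photos,
                       roster, roster_edit, seismo_dashboard, settings):
            app.include_router(module.router)
        return app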
34
app/seismo/routers/dashboard_tabs.py
Normal file
@@ -0,0 +1,34 @@
# app/seismo/routers/dashboard_tabs.py
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from backend.database import get_db
from backend.services.snapshot import emit_status_snapshot

router = APIRouter(prefix="/dashboard", tags=["dashboard-tabs"])

@router.get("/active")
def get_active_units(db: Session = Depends(get_db)):
    """
    Return only ACTIVE (deployed) units for dashboard table swap.
    """
    snap = emit_status_snapshot()
    units = {
        uid: u
        for uid, u in snap["units"].items()
        if u["deployed"] is True
    }
    return {"units": units}

@router.get("/benched")
def get_benched_units(db: Session = Depends(get_db)):
    """
    Return only BENCHED (not deployed) units for dashboard table swap.
    """
    snap = emit_status_snapshot()
    units = {
        uid: u
        for uid, u in snap["units"].items()
        if u["deployed"] is False
    }
    return {"units": units}
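
A small test sketch for the active/benched partition; the import path is assumed, and the snapshot service is patched so no database or emitter data is needed:

    from unittest.mock import patch

    from fastapi import FastAPI
    from fastapi.testclient import TestClient

    from app.seismo.routers import dashboard_tabs  # assumed import path

    app = FastAPI()
    app.include_router(dashboard_tabs.router)
    # The handlers declare a DB dependency they never use; stub it out here.
    app.dependency_overrides[dashboard_tabs.get_db] = lambda: None

    fake_snapshot = {"units": {
        "U1": {"deployed": True},
        "U2": {"deployed": False},
    }}

    with patch.object(dashboard_tabs, "emit_status_snapshot", return_value=fake_snapshot):
        client = TestClient(app)
        assert set(client.get("/dashboard/active").json()["units"]) == {"U1"}
        assert set(client.get("/dashboard/benched").json()["units"]) == {"U2"}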
242
app/seismo/routers/photos.py
Normal file
@@ -0,0 +1,242 @@
from fastapi import APIRouter, HTTPException, UploadFile, File, Depends
from fastapi.responses import FileResponse, JSONResponse
from pathlib import Path
from typing import List, Optional
from datetime import datetime
import os
import shutil
from PIL import Image
from PIL.ExifTags import TAGS, GPSTAGS
from sqlalchemy.orm import Session
from backend.database import get_db
from backend.models import RosterUnit

router = APIRouter(prefix="/api", tags=["photos"])

PHOTOS_BASE_DIR = Path("data/photos")


def extract_exif_data(image_path: Path) -> dict:
    """
    Extract EXIF metadata from an image file.
    Returns dict with timestamp, GPS coordinates, and other metadata.
    """
    try:
        image = Image.open(image_path)
        exif_data = image._getexif()

        if not exif_data:
            return {}

        metadata = {}

        # Extract standard EXIF tags
        for tag_id, value in exif_data.items():
            tag = TAGS.get(tag_id, tag_id)

            # Extract datetime
            if tag == "DateTime" or tag == "DateTimeOriginal":
                try:
                    metadata["timestamp"] = datetime.strptime(str(value), "%Y:%m:%d %H:%M:%S")
                except (ValueError, TypeError):
                    pass

            # Extract GPS data
            if tag == "GPSInfo":
                gps_data = {}
                for gps_tag_id in value:
                    gps_tag = GPSTAGS.get(gps_tag_id, gps_tag_id)
                    gps_data[gps_tag] = value[gps_tag_id]

                # Convert GPS data to decimal degrees
                lat = gps_data.get("GPSLatitude")
                lat_ref = gps_data.get("GPSLatitudeRef")
                lon = gps_data.get("GPSLongitude")
                lon_ref = gps_data.get("GPSLongitudeRef")

                if lat and lon and lat_ref and lon_ref:
                    # Convert to decimal degrees
                    lat_decimal = convert_to_degrees(lat)
                    if lat_ref == "S":
                        lat_decimal = -lat_decimal

                    lon_decimal = convert_to_degrees(lon)
                    if lon_ref == "W":
                        lon_decimal = -lon_decimal

                    metadata["latitude"] = lat_decimal
                    metadata["longitude"] = lon_decimal
                    metadata["coordinates"] = f"{lat_decimal},{lon_decimal}"

        return metadata
    except Exception as e:
        print(f"Error extracting EXIF data: {e}")
        return {}


def convert_to_degrees(value):
    """
    Convert GPS coordinates from degrees/minutes/seconds to decimal degrees.
    """
    d, m, s = value
    return float(d) + (float(m) / 60.0) + (float(s) / 3600.0)


@router.post("/unit/{unit_id}/upload-photo")
async def upload_photo(
    unit_id: str,
    photo: UploadFile = File(...),
    auto_populate_coords: bool = True,
    db: Session = Depends(get_db)
):
    """
    Upload a photo for a unit and extract EXIF metadata.
    If GPS data exists and auto_populate_coords is True, update the unit's coordinates.
    """
    # Validate file type
    allowed_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp"}
    file_ext = Path(photo.filename).suffix.lower()

    if file_ext not in allowed_extensions:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid file type. Allowed: {', '.join(allowed_extensions)}"
        )

    # Create photos directory for this unit
    unit_photo_dir = PHOTOS_BASE_DIR / unit_id
    unit_photo_dir.mkdir(parents=True, exist_ok=True)

    # Generate filename with timestamp to avoid collisions
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"{timestamp}_{photo.filename}"
    file_path = unit_photo_dir / filename

    # Save the file
    try:
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(photo.file, buffer)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to save photo: {str(e)}")

    # Extract EXIF metadata
    metadata = extract_exif_data(file_path)

    # Update unit coordinates if GPS data exists and auto_populate_coords is True
    coordinates_updated = False
    if auto_populate_coords and "coordinates" in metadata:
        roster_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()

        if roster_unit:
            roster_unit.coordinates = metadata["coordinates"]
            roster_unit.last_updated = datetime.utcnow()
            db.commit()
            coordinates_updated = True

    return JSONResponse(content={
        "success": True,
        "filename": filename,
        "file_path": f"/api/unit/{unit_id}/photo/{filename}",
        "metadata": {
            "timestamp": metadata.get("timestamp").isoformat() if metadata.get("timestamp") else None,
            "latitude": metadata.get("latitude"),
            "longitude": metadata.get("longitude"),
            "coordinates": metadata.get("coordinates")
        },
        "coordinates_updated": coordinates_updated
    })


@router.get("/unit/{unit_id}/photos")
def get_unit_photos(unit_id: str):
    """
    Reads /data/photos/<unit_id>/ and returns list of image filenames.
    Primary photo = most recent file.
    """
    unit_photo_dir = PHOTOS_BASE_DIR / unit_id

    if not unit_photo_dir.exists():
        # Return empty list if no photos directory exists
        return {
            "unit_id": unit_id,
            "photos": [],
            "primary_photo": None
        }

    # Get all image files
    image_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp"}
    photos = []

    for file_path in unit_photo_dir.iterdir():
        if file_path.is_file() and file_path.suffix.lower() in image_extensions:
            photos.append({
                "filename": file_path.name,
                "path": f"/api/unit/{unit_id}/photo/{file_path.name}",
                "modified": file_path.stat().st_mtime
            })

    # Sort by modification time (most recent first)
    photos.sort(key=lambda x: x["modified"], reverse=True)

    # Primary photo is the most recent
    primary_photo = photos[0]["filename"] if photos else None

    return {
        "unit_id": unit_id,
        "photos": [p["filename"] for p in photos],
        "primary_photo": primary_photo,
        "photo_urls": [p["path"] for p in photos]
    }


@router.get("/recent-photos")
def get_recent_photos(limit: int = 12):
    """
    Get the most recently uploaded photos across all units.
    Returns photos sorted by modification time (newest first).
    """
    if not PHOTOS_BASE_DIR.exists():
        return {"photos": []}

    all_photos = []
    image_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp"}

    # Scan all unit directories
    for unit_dir in PHOTOS_BASE_DIR.iterdir():
        if not unit_dir.is_dir():
            continue

        unit_id = unit_dir.name

        # Get all photos in this unit's directory
        for file_path in unit_dir.iterdir():
            if file_path.is_file() and file_path.suffix.lower() in image_extensions:
                all_photos.append({
                    "unit_id": unit_id,
                    "filename": file_path.name,
                    "path": f"/api/unit/{unit_id}/photo/{file_path.name}",
                    "modified": file_path.stat().st_mtime,
                    "modified_iso": datetime.fromtimestamp(file_path.stat().st_mtime).isoformat()
                })

    # Sort by modification time (most recent first) and limit
    all_photos.sort(key=lambda x: x["modified"], reverse=True)
    recent_photos = all_photos[:limit]

    return {
        "photos": recent_photos,
        "total": len(all_photos)
    }


@router.get("/unit/{unit_id}/photo/{filename}")
def get_photo(unit_id: str, filename: str):
    """
    Serves a specific photo file.
    """
    file_path = PHOTOS_BASE_DIR / unit_id / filename

    if not file_path.exists() or not file_path.is_file():
        raise HTTPException(status_code=404, detail="Photo not found")

    return FileResponse(file_path)
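
Upload sketch for the photo endpoint, assuming the same hypothetical base URL and a local JPEG with EXIF GPS tags:

    import httpx

    BASE = "http://localhost:8000"  # hypothetical

    # EXIF GPS (if present) is parsed server-side; with auto_populate_coords
    # left at its default of True, the coordinates are written back to the
    # unit's roster entry.
    with open("site_photo.jpg", "rb") as f:
        resp = httpx.post(
            f"{BASE}/api/unit/U123/upload-photo",
            files={"photo": ("site_photo.jpg", f, "image/jpeg")},
        )
    resp.raise_for_status()
    body = resp.json()
    print(body["metadata"], body["coordinates_updated"])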
46
app/seismo/routers/roster.py
Normal file
@@ -0,0 +1,46 @@
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from typing import Dict, Any
import random

from backend.database import get_db
from backend.services.snapshot import emit_status_snapshot

router = APIRouter(prefix="/api", tags=["roster"])


@router.get("/status-snapshot")
def get_status_snapshot(db: Session = Depends(get_db)):
    """
    Calls emit_status_snapshot() to get current fleet status.
    This will be replaced with real Series3 emitter logic later.
    """
    return emit_status_snapshot()


@router.get("/roster")
def get_roster(db: Session = Depends(get_db)):
    """
    Returns list of units with their metadata and status.
    Uses mock data for now.
    """
    snapshot = emit_status_snapshot()
    units_list = []

    for unit_id, unit_data in snapshot["units"].items():
        units_list.append({
            "id": unit_id,
            "status": unit_data["status"],
            "age": unit_data["age"],
            "last_seen": unit_data["last"],
            "deployed": unit_data["deployed"],
            "note": unit_data.get("note", ""),
            "last_file": unit_data.get("fname", "")
        })

    # Sort by status priority (Missing > Pending > OK) then by ID
    status_priority = {"Missing": 0, "Pending": 1, "OK": 2}
    units_list.sort(key=lambda x: (status_priority.get(x["status"], 3), x["id"]))

    return {"units": units_list}
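
The sort key above is worth a quick standalone illustration (plain Python, no server needed); the unit IDs are made up:

    # Missing first, then Pending, then OK; unknown statuses sink to the end
    # via the default priority 3; ties broken by unit ID.
    units_list = [
        {"id": "U3", "status": "OK"},
        {"id": "U1", "status": "Missing"},
        {"id": "U2", "status": "Pending"},
        {"id": "U0", "status": "Unknown"},  # not in the map -> priority 3
    ]
    status_priority = {"Missing": 0, "Pending": 1, "OK": 2}
    units_list.sort(key=lambda x: (status_priority.get(x["status"], 3), x["id"]))
    print([u["id"] for u in units_list])  # ['U1', 'U2', 'U3', 'U0']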
720
app/seismo/routers/roster_edit.py
Normal file
@@ -0,0 +1,720 @@
from fastapi import APIRouter, Depends, HTTPException, Form, UploadFile, File, Request
from fastapi.exceptions import RequestValidationError
from sqlalchemy.orm import Session
from datetime import datetime, date
import csv
import io
import logging
import httpx
import os

from backend.database import get_db
from backend.models import RosterUnit, IgnoredUnit, Emitter, UnitHistory

router = APIRouter(prefix="/api/roster", tags=["roster-edit"])
logger = logging.getLogger(__name__)

# SLMM backend URL for syncing device configs to cache
SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100")


def record_history(db: Session, unit_id: str, change_type: str, field_name: str = None,
                   old_value: str = None, new_value: str = None, source: str = "manual", notes: str = None):
    """Helper function to record a change in unit history"""
    history_entry = UnitHistory(
        unit_id=unit_id,
        change_type=change_type,
        field_name=field_name,
        old_value=old_value,
        new_value=new_value,
        changed_at=datetime.utcnow(),
        source=source,
        notes=notes
    )
    db.add(history_entry)
    # Note: caller is responsible for db.commit()


def get_or_create_roster_unit(db: Session, unit_id: str):
    unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if not unit:
        unit = RosterUnit(id=unit_id)
        db.add(unit)
        db.commit()
        db.refresh(unit)
    return unit


async def sync_slm_to_slmm_cache(
    unit_id: str,
    host: str = None,
    tcp_port: int = None,
    ftp_port: int = None,
    ftp_username: str = None,
    ftp_password: str = None,
    deployed_with_modem_id: str = None,
    db: Session = None
) -> dict:
    """
    Sync SLM device configuration to SLMM backend cache.

    Terra-View is the source of truth for device configs. This function updates
    SLMM's config cache (NL43Config table) so SLMM can look up device connection
    info by unit_id without Terra-View passing host:port with every request.

    Args:
        unit_id: Unique identifier for the SLM device
        host: Direct IP address/hostname OR will be resolved from modem
        tcp_port: TCP control port (default: 2255)
        ftp_port: FTP port (default: 21)
        ftp_username: FTP username (optional)
        ftp_password: FTP password (optional)
        deployed_with_modem_id: If set, resolve modem IP as host
        db: Database session for modem lookup

    Returns:
        dict: {"success": bool, "message": str}
    """
    # Resolve host from modem if assigned
    if deployed_with_modem_id and db:
        modem = db.query(RosterUnit).filter_by(
            id=deployed_with_modem_id,
            device_type="modem"
        ).first()
        if modem and modem.ip_address:
            host = modem.ip_address
            logger.info(f"Resolved host from modem {deployed_with_modem_id}: {host}")

    # Validate required fields
    if not host:
        logger.warning(f"Cannot sync SLM {unit_id} to SLMM: no host/IP address provided")
        return {"success": False, "message": "No host IP address available"}

    # Set defaults
    tcp_port = tcp_port or 2255
    ftp_port = ftp_port or 21

    # Build SLMM cache payload
    config_payload = {
        "host": host,
        "tcp_port": tcp_port,
        "tcp_enabled": True,
        "ftp_enabled": bool(ftp_username and ftp_password),
        "web_enabled": False
    }

    if ftp_username and ftp_password:
        config_payload["ftp_username"] = ftp_username
        config_payload["ftp_password"] = ftp_password

    # Call SLMM cache update API
    slmm_url = f"{SLMM_BASE_URL}/api/nl43/{unit_id}/config"

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.put(slmm_url, json=config_payload)

            if response.status_code in [200, 201]:
                logger.info(f"Successfully synced SLM {unit_id} to SLMM cache")
                return {"success": True, "message": "Device config cached in SLMM"}
            else:
                logger.error(f"SLMM cache sync failed for {unit_id}: HTTP {response.status_code}")
                return {"success": False, "message": f"SLMM returned status {response.status_code}"}

    except httpx.ConnectError:
        logger.error(f"Cannot connect to SLMM service at {SLMM_BASE_URL}")
        return {"success": False, "message": "SLMM service unavailable"}

    except Exception as e:
        logger.error(f"Error syncing SLM {unit_id} to SLMM: {e}")
        return {"success": False, "message": str(e)}


@router.post("/add")
async def add_roster_unit(
    id: str = Form(...),
    device_type: str = Form("seismograph"),
    unit_type: str = Form("series3"),
    deployed: str = Form(None),
    retired: str = Form(None),
    note: str = Form(""),
    project_id: str = Form(None),
    location: str = Form(None),
    address: str = Form(None),
    coordinates: str = Form(None),
    # Seismograph-specific fields
    last_calibrated: str = Form(None),
    next_calibration_due: str = Form(None),
    deployed_with_modem_id: str = Form(None),
    # Modem-specific fields
    ip_address: str = Form(None),
    phone_number: str = Form(None),
    hardware_model: str = Form(None),
    # Sound Level Meter-specific fields
    slm_host: str = Form(None),
    slm_tcp_port: str = Form(None),
    slm_ftp_port: str = Form(None),
    slm_model: str = Form(None),
    slm_serial_number: str = Form(None),
    slm_frequency_weighting: str = Form(None),
    slm_time_weighting: str = Form(None),
    slm_measurement_range: str = Form(None),
    db: Session = Depends(get_db)
):
    logger.info(f"Adding unit: id={id}, device_type={device_type}, deployed={deployed}, retired={retired}")

    # Convert boolean strings to actual booleans
    deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False
    retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False

    # Convert port strings to integers
    slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None
    slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None

    if db.query(RosterUnit).filter(RosterUnit.id == id).first():
        raise HTTPException(status_code=400, detail="Unit already exists")

    # Parse date fields if provided
    last_cal_date = None
    if last_calibrated:
        try:
            last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD")

    next_cal_date = None
    if next_calibration_due:
        try:
            next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD")

    unit = RosterUnit(
        id=id,
        device_type=device_type,
        unit_type=unit_type,
        deployed=deployed_bool,
        retired=retired_bool,
        note=note,
        project_id=project_id,
        location=location,
        address=address,
        coordinates=coordinates,
        last_updated=datetime.utcnow(),
        # Seismograph-specific fields
        last_calibrated=last_cal_date,
        next_calibration_due=next_cal_date,
        deployed_with_modem_id=deployed_with_modem_id if deployed_with_modem_id else None,
        # Modem-specific fields
        ip_address=ip_address if ip_address else None,
        phone_number=phone_number if phone_number else None,
        hardware_model=hardware_model if hardware_model else None,
        # Sound Level Meter-specific fields
        slm_host=slm_host if slm_host else None,
        slm_tcp_port=slm_tcp_port_int,
        slm_ftp_port=slm_ftp_port_int,
        slm_model=slm_model if slm_model else None,
        slm_serial_number=slm_serial_number if slm_serial_number else None,
        slm_frequency_weighting=slm_frequency_weighting if slm_frequency_weighting else None,
        slm_time_weighting=slm_time_weighting if slm_time_weighting else None,
        slm_measurement_range=slm_measurement_range if slm_measurement_range else None,
    )
    db.add(unit)
    db.commit()

    # If sound level meter, sync config to SLMM cache
    if device_type == "sound_level_meter":
        logger.info(f"Syncing SLM {id} config to SLMM cache...")
        result = await sync_slm_to_slmm_cache(
            unit_id=id,
            host=slm_host,
            tcp_port=slm_tcp_port_int,
            ftp_port=slm_ftp_port_int,
            deployed_with_modem_id=deployed_with_modem_id,
            db=db
        )

        if not result["success"]:
            logger.warning(f"SLMM cache sync warning for {id}: {result['message']}")
            # Don't fail the operation - device is still added to Terra-View roster
            # User can manually sync later or SLMM will be synced on next config update

    return {"message": "Unit added", "id": id, "device_type": device_type}


@router.get("/modems")
def get_modems_list(db: Session = Depends(get_db)):
    """Get list of all modem units for dropdown selection"""
    modems = db.query(RosterUnit).filter_by(device_type="modem", retired=False).order_by(RosterUnit.id).all()

    return [
        {
            "id": modem.id,
            "ip_address": modem.ip_address,
            "phone_number": modem.phone_number,
            "hardware_model": modem.hardware_model,
            "deployed": modem.deployed
        }
        for modem in modems
    ]


@router.get("/{unit_id}")
def get_roster_unit(unit_id: str, db: Session = Depends(get_db)):
    """Get a single roster unit by ID"""
    unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if not unit:
        raise HTTPException(status_code=404, detail="Unit not found")

    return {
        "id": unit.id,
        "device_type": unit.device_type or "seismograph",
        "unit_type": unit.unit_type,
        "deployed": unit.deployed,
        "retired": unit.retired,
        "note": unit.note or "",
        "project_id": unit.project_id or "",
        "location": unit.location or "",
        "address": unit.address or "",
        "coordinates": unit.coordinates or "",
        "last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else "",
        "next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else "",
        "deployed_with_modem_id": unit.deployed_with_modem_id or "",
        "ip_address": unit.ip_address or "",
        "phone_number": unit.phone_number or "",
        "hardware_model": unit.hardware_model or "",
        "slm_host": unit.slm_host or "",
        "slm_tcp_port": unit.slm_tcp_port or "",
        "slm_ftp_port": unit.slm_ftp_port or "",
        "slm_model": unit.slm_model or "",
        "slm_serial_number": unit.slm_serial_number or "",
        "slm_frequency_weighting": unit.slm_frequency_weighting or "",
        "slm_time_weighting": unit.slm_time_weighting or "",
        "slm_measurement_range": unit.slm_measurement_range or "",
    }


@router.post("/edit/{unit_id}")
def edit_roster_unit(
    unit_id: str,
    device_type: str = Form("seismograph"),
    unit_type: str = Form("series3"),
    deployed: str = Form(None),
    retired: str = Form(None),
    note: str = Form(""),
    project_id: str = Form(None),
    location: str = Form(None),
    address: str = Form(None),
    coordinates: str = Form(None),
    # Seismograph-specific fields
    last_calibrated: str = Form(None),
    next_calibration_due: str = Form(None),
    deployed_with_modem_id: str = Form(None),
    # Modem-specific fields
    ip_address: str = Form(None),
    phone_number: str = Form(None),
    hardware_model: str = Form(None),
    # Sound Level Meter-specific fields
    slm_host: str = Form(None),
    slm_tcp_port: str = Form(None),
    slm_ftp_port: str = Form(None),
    slm_model: str = Form(None),
    slm_serial_number: str = Form(None),
    slm_frequency_weighting: str = Form(None),
    slm_time_weighting: str = Form(None),
    slm_measurement_range: str = Form(None),
    db: Session = Depends(get_db)
):
    unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if not unit:
        raise HTTPException(status_code=404, detail="Unit not found")

    # Convert boolean strings to actual booleans
    deployed_bool = deployed in ['true', 'True', '1', 'yes'] if deployed else False
    retired_bool = retired in ['true', 'True', '1', 'yes'] if retired else False

    # Convert port strings to integers
    slm_tcp_port_int = int(slm_tcp_port) if slm_tcp_port and slm_tcp_port.strip() else None
    slm_ftp_port_int = int(slm_ftp_port) if slm_ftp_port and slm_ftp_port.strip() else None

    # Parse date fields if provided
    last_cal_date = None
    if last_calibrated:
        try:
            last_cal_date = datetime.strptime(last_calibrated, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid last_calibrated date format. Use YYYY-MM-DD")

    next_cal_date = None
    if next_calibration_due:
        try:
            next_cal_date = datetime.strptime(next_calibration_due, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid next_calibration_due date format. Use YYYY-MM-DD")

    # Track changes for history
    old_note = unit.note
    old_deployed = unit.deployed
    old_retired = unit.retired

    # Update all fields
    unit.device_type = device_type
    unit.unit_type = unit_type
    unit.deployed = deployed_bool
    unit.retired = retired_bool
    unit.note = note
    unit.project_id = project_id
    unit.location = location
    unit.address = address
    unit.coordinates = coordinates
    unit.last_updated = datetime.utcnow()

    # Seismograph-specific fields
    unit.last_calibrated = last_cal_date
    unit.next_calibration_due = next_cal_date
    unit.deployed_with_modem_id = deployed_with_modem_id if deployed_with_modem_id else None

    # Modem-specific fields
    unit.ip_address = ip_address if ip_address else None
    unit.phone_number = phone_number if phone_number else None
    unit.hardware_model = hardware_model if hardware_model else None

    # Sound Level Meter-specific fields
    unit.slm_host = slm_host if slm_host else None
    unit.slm_tcp_port = slm_tcp_port_int
    unit.slm_ftp_port = slm_ftp_port_int
    unit.slm_model = slm_model if slm_model else None
    unit.slm_serial_number = slm_serial_number if slm_serial_number else None
    unit.slm_frequency_weighting = slm_frequency_weighting if slm_frequency_weighting else None
    unit.slm_time_weighting = slm_time_weighting if slm_time_weighting else None
    unit.slm_measurement_range = slm_measurement_range if slm_measurement_range else None

    # Record history entries for changed fields (compare against the parsed
    # booleans, not the raw form strings)
    if old_note != note:
        record_history(db, unit_id, "note_change", "note", old_note, note, "manual")

    if old_deployed != deployed_bool:
        status_text = "deployed" if deployed_bool else "benched"
        old_status_text = "deployed" if old_deployed else "benched"
        record_history(db, unit_id, "deployed_change", "deployed", old_status_text, status_text, "manual")

    if old_retired != retired_bool:
        status_text = "retired" if retired_bool else "active"
        old_status_text = "retired" if old_retired else "active"
        record_history(db, unit_id, "retired_change", "retired", old_status_text, status_text, "manual")

    db.commit()
    return {"message": "Unit updated", "id": unit_id, "device_type": device_type}


@router.post("/set-deployed/{unit_id}")
def set_deployed(unit_id: str, deployed: bool = Form(...), db: Session = Depends(get_db)):
    unit = get_or_create_roster_unit(db, unit_id)
    old_deployed = unit.deployed
    unit.deployed = deployed
    unit.last_updated = datetime.utcnow()

    # Record history entry for deployed status change
    if old_deployed != deployed:
        status_text = "deployed" if deployed else "benched"
        old_status_text = "deployed" if old_deployed else "benched"
        record_history(
            db=db,
            unit_id=unit_id,
            change_type="deployed_change",
            field_name="deployed",
            old_value=old_status_text,
            new_value=status_text,
            source="manual"
        )

    db.commit()
    return {"message": "Updated", "id": unit_id, "deployed": deployed}


@router.post("/set-retired/{unit_id}")
def set_retired(unit_id: str, retired: bool = Form(...), db: Session = Depends(get_db)):
    unit = get_or_create_roster_unit(db, unit_id)
    old_retired = unit.retired
    unit.retired = retired
    unit.last_updated = datetime.utcnow()

    # Record history entry for retired status change
    if old_retired != retired:
        status_text = "retired" if retired else "active"
        old_status_text = "retired" if old_retired else "active"
        record_history(
            db=db,
            unit_id=unit_id,
            change_type="retired_change",
            field_name="retired",
            old_value=old_status_text,
            new_value=status_text,
            source="manual"
        )

    db.commit()
    return {"message": "Updated", "id": unit_id, "retired": retired}


@router.delete("/{unit_id}")
def delete_roster_unit(unit_id: str, db: Session = Depends(get_db)):
    """
    Permanently delete a unit from the database.
    Checks roster, emitters, and ignored_units tables and deletes from any table where the unit exists.
    """
    deleted = False

    # Try to delete from roster table
    roster_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()
    if roster_unit:
        db.delete(roster_unit)
        deleted = True

    # Try to delete from emitters table
    emitter = db.query(Emitter).filter(Emitter.id == unit_id).first()
    if emitter:
        db.delete(emitter)
        deleted = True

    # Try to delete from ignored_units table
    ignored_unit = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first()
    if ignored_unit:
        db.delete(ignored_unit)
        deleted = True

    # If not found in any table, return error
    if not deleted:
        raise HTTPException(status_code=404, detail="Unit not found")

    db.commit()
    return {"message": "Unit deleted", "id": unit_id}


@router.post("/set-note/{unit_id}")
def set_note(unit_id: str, note: str = Form(""), db: Session = Depends(get_db)):
    unit = get_or_create_roster_unit(db, unit_id)
    old_note = unit.note
    unit.note = note
    unit.last_updated = datetime.utcnow()

    # Record history entry for note change
    if old_note != note:
        record_history(
            db=db,
            unit_id=unit_id,
            change_type="note_change",
            field_name="note",
            old_value=old_note,
            new_value=note,
            source="manual"
        )

    db.commit()
    return {"message": "Updated", "id": unit_id, "note": note}


@router.post("/import-csv")
async def import_csv(
    file: UploadFile = File(...),
    update_existing: bool = Form(True),
    db: Session = Depends(get_db)
):
    """
    Import roster units from CSV file.

    Expected CSV columns (unit_id is required, others are optional):
    - unit_id: Unique identifier for the unit
    - unit_type: Type of unit (default: "series3")
    - deployed: Boolean for deployment status (default: False)
    - retired: Boolean for retirement status (default: False)
    - note: Notes about the unit
    - project_id: Project identifier
    - location: Location description

    Args:
        file: CSV file upload
        update_existing: If True, update existing units; if False, skip them
    """

    if not file.filename.endswith('.csv'):
        raise HTTPException(status_code=400, detail="File must be a CSV")

    # Read file content
    contents = await file.read()
    csv_text = contents.decode('utf-8')
    csv_reader = csv.DictReader(io.StringIO(csv_text))

    results = {
        "added": [],
        "updated": [],
        "skipped": [],
        "errors": []
    }

    for row_num, row in enumerate(csv_reader, start=2):  # Start at 2 to account for header
        try:
            # Validate required field
            unit_id = row.get('unit_id', '').strip()
            if not unit_id:
                results["errors"].append({
                    "row": row_num,
                    "error": "Missing required field: unit_id"
                })
                continue

            # Check if unit exists
            existing_unit = db.query(RosterUnit).filter(RosterUnit.id == unit_id).first()

            if existing_unit:
                if not update_existing:
                    results["skipped"].append(unit_id)
                    continue

                # Update existing unit
                existing_unit.unit_type = row.get('unit_type', existing_unit.unit_type or 'series3')
                existing_unit.deployed = row.get('deployed', '').lower() in ('true', '1', 'yes') if row.get('deployed') else existing_unit.deployed
                existing_unit.retired = row.get('retired', '').lower() in ('true', '1', 'yes') if row.get('retired') else existing_unit.retired
                existing_unit.note = row.get('note', existing_unit.note or '')
                existing_unit.project_id = row.get('project_id', existing_unit.project_id)
                existing_unit.location = row.get('location', existing_unit.location)
                existing_unit.address = row.get('address', existing_unit.address)
                existing_unit.coordinates = row.get('coordinates', existing_unit.coordinates)
                existing_unit.last_updated = datetime.utcnow()

                results["updated"].append(unit_id)
            else:
                # Create new unit
                new_unit = RosterUnit(
                    id=unit_id,
                    unit_type=row.get('unit_type', 'series3'),
                    deployed=row.get('deployed', '').lower() in ('true', '1', 'yes'),
                    retired=row.get('retired', '').lower() in ('true', '1', 'yes'),
                    note=row.get('note', ''),
                    project_id=row.get('project_id'),
                    location=row.get('location'),
                    address=row.get('address'),
                    coordinates=row.get('coordinates'),
                    last_updated=datetime.utcnow()
                )
                db.add(new_unit)
                results["added"].append(unit_id)

        except Exception as e:
            results["errors"].append({
                "row": row_num,
                "unit_id": row.get('unit_id', 'unknown'),
                "error": str(e)
            })

    # Commit all changes
    try:
        db.commit()
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")

    return {
        "message": "CSV import completed",
        "summary": {
            "added": len(results["added"]),
            "updated": len(results["updated"]),
            "skipped": len(results["skipped"]),
            "errors": len(results["errors"])
        },
        "details": results
    }


@router.post("/ignore/{unit_id}")
def ignore_unit(unit_id: str, reason: str = Form(""), db: Session = Depends(get_db)):
    """
    Add a unit to the ignore list to suppress it from unknown emitters.
    """
    # Check if already ignored
    if db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first():
        raise HTTPException(status_code=400, detail="Unit already ignored")

    ignored = IgnoredUnit(
        id=unit_id,
        reason=reason,
        ignored_at=datetime.utcnow()
    )
    db.add(ignored)
    db.commit()
    return {"message": "Unit ignored", "id": unit_id}


@router.delete("/ignore/{unit_id}")
def unignore_unit(unit_id: str, db: Session = Depends(get_db)):
    """
    Remove a unit from the ignore list.
    """
    ignored = db.query(IgnoredUnit).filter(IgnoredUnit.id == unit_id).first()
    if not ignored:
        raise HTTPException(status_code=404, detail="Unit not in ignore list")

    db.delete(ignored)
    db.commit()
    return {"message": "Unit unignored", "id": unit_id}


@router.get("/ignored")
def list_ignored_units(db: Session = Depends(get_db)):
    """
    Get list of all ignored units.
    """
    ignored_units = db.query(IgnoredUnit).all()
    return {
        "ignored": [
            {
                "id": unit.id,
                "reason": unit.reason,
                "ignored_at": unit.ignored_at.isoformat()
            }
            for unit in ignored_units
        ]
    }


@router.get("/history/{unit_id}")
def get_unit_history(unit_id: str, db: Session = Depends(get_db)):
    """
    Get complete history timeline for a unit.
    Returns all historical changes ordered by most recent first.
    """
    history_entries = db.query(UnitHistory).filter(
        UnitHistory.unit_id == unit_id
    ).order_by(UnitHistory.changed_at.desc()).all()

    return {
        "unit_id": unit_id,
        "history": [
            {
                "id": entry.id,
                "change_type": entry.change_type,
                "field_name": entry.field_name,
                "old_value": entry.old_value,
                "new_value": entry.new_value,
                "changed_at": entry.changed_at.isoformat(),
                "source": entry.source,
                "notes": entry.notes
            }
            for entry in history_entries
        ]
    }


@router.delete("/history/{history_id}")
def delete_history_entry(history_id: int, db: Session = Depends(get_db)):
    """
    Delete a specific history entry by ID.
    Allows manual cleanup of old history entries.
    """
    history_entry = db.query(UnitHistory).filter(UnitHistory.id == history_id).first()
    if not history_entry:
        raise HTTPException(status_code=404, detail="History entry not found")

    db.delete(history_entry)
    db.commit()
    return {"message": "History entry deleted", "id": history_id}
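
Usage sketch for the add endpoint, posting form fields the way the roster edit form would; the unit values and base URL are hypothetical:

    import httpx

    BASE = "http://localhost:8000"  # hypothetical

    # Register a sound level meter; because device_type is "sound_level_meter",
    # the handler also pushes the connection config to the SLMM cache.
    resp = httpx.post(f"{BASE}/api/roster/add", data={
        "id": "SLM-007",
        "device_type": "sound_level_meter",
        "deployed": "true",
        "slm_host": "192.0.2.10",
        "slm_tcp_port": "2255",
    })
    print(resp.status_code, resp.json())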
81
app/seismo/routers/seismo_dashboard.py
Normal file
@@ -0,0 +1,81 @@
"""
Seismograph Dashboard API Router
Provides endpoints for the seismograph-specific dashboard
"""

from fastapi import APIRouter, Request, Depends, Query
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from sqlalchemy.orm import Session
from backend.database import get_db
from backend.models import RosterUnit

router = APIRouter(prefix="/api/seismo-dashboard", tags=["seismo-dashboard"])
templates = Jinja2Templates(directory="templates")


@router.get("/stats", response_class=HTMLResponse)
async def get_seismo_stats(request: Request, db: Session = Depends(get_db)):
    """
    Returns HTML partial with seismograph statistics summary
    """
    # Get all seismograph units
    seismos = db.query(RosterUnit).filter_by(
        device_type="seismograph",
        retired=False
    ).all()

    total = len(seismos)
    deployed = sum(1 for s in seismos if s.deployed)
    benched = sum(1 for s in seismos if not s.deployed)

    # Count modems assigned to deployed seismographs
    with_modem = sum(1 for s in seismos if s.deployed and s.deployed_with_modem_id)
    without_modem = deployed - with_modem

    return templates.TemplateResponse(
        "partials/seismo_stats.html",
        {
            "request": request,
            "total": total,
            "deployed": deployed,
            "benched": benched,
            "with_modem": with_modem,
            "without_modem": without_modem
        }
    )


@router.get("/units", response_class=HTMLResponse)
async def get_seismo_units(
    request: Request,
    db: Session = Depends(get_db),
    search: str = Query(None)
):
    """
    Returns HTML partial with filterable seismograph unit list
    """
    query = db.query(RosterUnit).filter_by(
        device_type="seismograph",
        retired=False
    )

    # Apply search filter (ilike is already case-insensitive)
    if search:
        query = query.filter(
            (RosterUnit.id.ilike(f"%{search}%")) |
            (RosterUnit.note.ilike(f"%{search}%")) |
            (RosterUnit.address.ilike(f"%{search}%"))
        )

    seismos = query.order_by(RosterUnit.id).all()

    return templates.TemplateResponse(
        "partials/seismo_unit_list.html",
        {
            "request": request,
            "units": seismos,
            "search": search or ""
        }
    )
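
Note that both endpoints return HTML fragments for table swaps rather than JSON; a quick fetch sketch, with the base URL and search term hypothetical:

    import httpx

    BASE = "http://localhost:8000"  # hypothetical

    # The search matches unit ID, note, or address, case-insensitively (ilike).
    html = httpx.get(f"{BASE}/api/seismo-dashboard/units",
                     params={"search": "BE"}).text
    print(html[:200])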
479
app/seismo/routers/settings.py
Normal file
@@ -0,0 +1,479 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
|
||||
from fastapi.responses import StreamingResponse, FileResponse
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import datetime, date
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
import csv
|
||||
import io
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from backend.database import get_db
|
||||
from backend.models import RosterUnit, Emitter, IgnoredUnit, UserPreferences
|
||||
from backend.services.database_backup import DatabaseBackupService
|
||||
|
||||
router = APIRouter(prefix="/api/settings", tags=["settings"])
|
||||
|
||||
|
||||
@router.get("/export-csv")
|
||||
def export_roster_csv(db: Session = Depends(get_db)):
|
||||
"""Export all roster units to CSV"""
|
||||
units = db.query(RosterUnit).all()
|
||||
|
||||
# Create CSV in memory
|
||||
output = io.StringIO()
|
||||
fieldnames = [
|
||||
'unit_id', 'unit_type', 'device_type', 'deployed', 'retired',
|
||||
'note', 'project_id', 'location', 'address', 'coordinates',
|
||||
'last_calibrated', 'next_calibration_due', 'deployed_with_modem_id',
|
||||
'ip_address', 'phone_number', 'hardware_model'
|
||||
]
|
||||
|
||||
writer = csv.DictWriter(output, fieldnames=fieldnames)
|
||||
writer.writeheader()
|
||||
|
||||
for unit in units:
|
||||
writer.writerow({
|
||||
'unit_id': unit.id,
|
||||
'unit_type': unit.unit_type or '',
|
||||
'device_type': unit.device_type or 'seismograph',
|
||||
'deployed': 'true' if unit.deployed else 'false',
|
||||
'retired': 'true' if unit.retired else 'false',
|
||||
'note': unit.note or '',
|
||||
'project_id': unit.project_id or '',
|
||||
'location': unit.location or '',
|
||||
'address': unit.address or '',
|
||||
'coordinates': unit.coordinates or '',
|
||||
'last_calibrated': unit.last_calibrated.strftime('%Y-%m-%d') if unit.last_calibrated else '',
|
||||
'next_calibration_due': unit.next_calibration_due.strftime('%Y-%m-%d') if unit.next_calibration_due else '',
|
||||
'deployed_with_modem_id': unit.deployed_with_modem_id or '',
|
||||
'ip_address': unit.ip_address or '',
|
||||
'phone_number': unit.phone_number or '',
|
||||
'hardware_model': unit.hardware_model or ''
|
||||
})
|
||||
|
||||
output.seek(0)
|
||||
filename = f"roster_export_{date.today().isoformat()}.csv"
|
||||
|
||||
return StreamingResponse(
|
||||
io.BytesIO(output.getvalue().encode('utf-8')),
|
||||
media_type="text/csv",
|
||||
headers={"Content-Disposition": f"attachment; filename={filename}"}
|
||||
)
|
||||
|
||||
|
||||
@router.get("/stats")
|
||||
def get_table_stats(db: Session = Depends(get_db)):
|
||||
"""Get counts for all tables"""
|
||||
roster_count = db.query(RosterUnit).count()
|
||||
emitters_count = db.query(Emitter).count()
|
||||
ignored_count = db.query(IgnoredUnit).count()
|
||||
|
||||
return {
|
||||
"roster": roster_count,
|
||||
"emitters": emitters_count,
|
||||
"ignored": ignored_count,
|
||||
"total": roster_count + emitters_count + ignored_count
|
||||
}
|
||||
|
||||
|
||||
@router.get("/roster-units")
|
||||
def get_all_roster_units(db: Session = Depends(get_db)):
|
||||
"""Get all roster units for management table"""
|
||||
units = db.query(RosterUnit).order_by(RosterUnit.id).all()
|
||||
|
||||
return [{
|
||||
"id": unit.id,
|
||||
"device_type": unit.device_type or "seismograph",
|
||||
"unit_type": unit.unit_type or "series3",
|
||||
"deployed": unit.deployed,
|
||||
"retired": unit.retired,
|
||||
"note": unit.note or "",
|
||||
"project_id": unit.project_id or "",
|
||||
"location": unit.location or "",
|
||||
"address": unit.address or "",
|
||||
"coordinates": unit.coordinates or "",
|
||||
"last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else None,
|
||||
"next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else None,
|
||||
"deployed_with_modem_id": unit.deployed_with_modem_id or "",
|
||||
"ip_address": unit.ip_address or "",
|
||||
"phone_number": unit.phone_number or "",
|
||||
"hardware_model": unit.hardware_model or "",
|
||||
"slm_host": unit.slm_host or "",
|
||||
"slm_tcp_port": unit.slm_tcp_port,
|
||||
"slm_model": unit.slm_model or "",
|
||||
"slm_serial_number": unit.slm_serial_number or "",
|
||||
"slm_frequency_weighting": unit.slm_frequency_weighting or "",
|
||||
"slm_time_weighting": unit.slm_time_weighting or "",
|
||||
"slm_measurement_range": unit.slm_measurement_range or "",
|
||||
"slm_last_check": unit.slm_last_check.isoformat() if unit.slm_last_check else None,
|
||||
"last_updated": unit.last_updated.isoformat() if unit.last_updated else None
|
||||
} for unit in units]
|
||||
|
||||
|
||||
def parse_date(date_str):
|
||||
"""Helper function to parse date strings"""
|
||||
if not date_str or not date_str.strip():
|
||||
return None
|
||||
try:
|
||||
return datetime.strptime(date_str.strip(), "%Y-%m-%d").date()
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
@router.post("/import-csv-replace")
|
||||
async def import_csv_replace(
|
||||
file: UploadFile = File(...),
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Replace all roster data with CSV import (atomic transaction).
|
||||
Clears roster table first, then imports all rows from CSV.
|
||||
"""
|
||||
|
||||
if not file.filename.endswith('.csv'):
|
||||
raise HTTPException(status_code=400, detail="File must be a CSV")
|
||||
|
||||
# Read and parse CSV
|
||||
contents = await file.read()
|
||||
csv_text = contents.decode('utf-8')
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_text))
|
||||
|
||||
# Parse all rows FIRST (fail fast before deletion)
|
||||
parsed_units = []
|
||||
for row_num, row in enumerate(csv_reader, start=2):
|
||||
unit_id = row.get('unit_id', '').strip()
|
||||
if not unit_id:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"Row {row_num}: Missing required field unit_id"
|
||||
)
|
||||
|
||||
# Parse and validate dates
|
||||
last_cal_date = parse_date(row.get('last_calibrated'))
|
||||
next_cal_date = parse_date(row.get('next_calibration_due'))
|
||||
|
||||
parsed_units.append({
|
||||
'id': unit_id,
|
||||
'unit_type': row.get('unit_type', 'series3'),
|
||||
'device_type': row.get('device_type', 'seismograph'),
|
||||
'deployed': row.get('deployed', '').lower() in ('true', '1', 'yes'),
|
||||
'retired': row.get('retired', '').lower() in ('true', '1', 'yes'),
|
||||
'note': row.get('note', ''),
|
||||
'project_id': row.get('project_id') or None,
|
||||
'location': row.get('location') or None,
|
||||
'address': row.get('address') or None,
|
||||
'coordinates': row.get('coordinates') or None,
|
||||
'last_calibrated': last_cal_date,
|
||||
'next_calibration_due': next_cal_date,
|
||||
'deployed_with_modem_id': row.get('deployed_with_modem_id') or None,
|
||||
'ip_address': row.get('ip_address') or None,
|
||||
'phone_number': row.get('phone_number') or None,
|
||||
'hardware_model': row.get('hardware_model') or None,
|
||||
})
|
||||
|
||||
# Atomic transaction: delete all, then insert all
|
||||
try:
|
||||
deleted_count = db.query(RosterUnit).delete()
|
||||
|
||||
for unit_data in parsed_units:
|
||||
new_unit = RosterUnit(**unit_data, last_updated=datetime.utcnow())
|
||||
db.add(new_unit)
|
||||
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"message": "Roster replaced successfully",
|
||||
"deleted": deleted_count,
|
||||
"added": len(parsed_units)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Import failed: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/clear-all")
|
||||
def clear_all_data(db: Session = Depends(get_db)):
|
||||
"""Clear all tables (roster, emitters, ignored)"""
|
||||
try:
|
||||
roster_count = db.query(RosterUnit).delete()
|
||||
emitters_count = db.query(Emitter).delete()
|
||||
ignored_count = db.query(IgnoredUnit).delete()
|
||||
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"message": "All data cleared",
|
||||
"deleted": {
|
||||
"roster": roster_count,
|
||||
"emitters": emitters_count,
|
||||
"ignored": ignored_count,
|
||||
"total": roster_count + emitters_count + ignored_count
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/clear-roster")
|
||||
def clear_roster(db: Session = Depends(get_db)):
|
||||
"""Clear roster table only"""
|
||||
try:
|
||||
count = db.query(RosterUnit).delete()
|
||||
db.commit()
|
||||
return {"message": "Roster cleared", "deleted": count}
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/clear-emitters")
|
||||
def clear_emitters(db: Session = Depends(get_db)):
|
||||
"""Clear emitters table only"""
|
||||
try:
|
||||
count = db.query(Emitter).delete()
|
||||
db.commit()
|
||||
return {"message": "Emitters cleared", "deleted": count}
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/clear-ignored")
|
||||
def clear_ignored(db: Session = Depends(get_db)):
|
||||
"""Clear ignored units table only"""
|
||||
try:
|
||||
count = db.query(IgnoredUnit).delete()
|
||||
db.commit()
|
||||
return {"message": "Ignored units cleared", "deleted": count}
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
||||
|
||||
|
||||
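
# Illustrative sketch (never called by the app): the clear endpoints
# above are plain POSTs with no request body. The base URL is an
# assumption.
def _example_clear_roster():
    import requests  # assumed available

    resp = requests.post("http://localhost:8000/api/clear-roster")
    print(resp.json())  # {"message": "Roster cleared", "deleted": <row count>}
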
# User Preferences Endpoints

class PreferencesUpdate(BaseModel):
    """Schema for updating user preferences (all fields optional)"""
    timezone: Optional[str] = None
    theme: Optional[str] = None
    auto_refresh_interval: Optional[int] = None
    date_format: Optional[str] = None
    table_rows_per_page: Optional[int] = None
    calibration_interval_days: Optional[int] = None
    calibration_warning_days: Optional[int] = None
    status_ok_threshold_hours: Optional[int] = None
    status_pending_threshold_hours: Optional[int] = None


@router.get("/preferences")
def get_preferences(db: Session = Depends(get_db)):
    """
    Get user preferences. Creates default preferences if none exist.
    """
    prefs = db.query(UserPreferences).filter(UserPreferences.id == 1).first()

    if not prefs:
        # Create default preferences
        prefs = UserPreferences(id=1)
        db.add(prefs)
        db.commit()
        db.refresh(prefs)

    return {
        "timezone": prefs.timezone,
        "theme": prefs.theme,
        "auto_refresh_interval": prefs.auto_refresh_interval,
        "date_format": prefs.date_format,
        "table_rows_per_page": prefs.table_rows_per_page,
        "calibration_interval_days": prefs.calibration_interval_days,
        "calibration_warning_days": prefs.calibration_warning_days,
        "status_ok_threshold_hours": prefs.status_ok_threshold_hours,
        "status_pending_threshold_hours": prefs.status_pending_threshold_hours,
        "updated_at": prefs.updated_at.isoformat() if prefs.updated_at else None
    }


@router.put("/preferences")
def update_preferences(
    updates: PreferencesUpdate,
    db: Session = Depends(get_db)
):
    """
    Update user preferences. Accepts partial updates.
    Creates default preferences if none exist.
    """
    prefs = db.query(UserPreferences).filter(UserPreferences.id == 1).first()

    if not prefs:
        # Create default preferences
        prefs = UserPreferences(id=1)
        db.add(prefs)

    # Update only provided fields
    update_data = updates.dict(exclude_unset=True)
    for field, value in update_data.items():
        setattr(prefs, field, value)

    prefs.updated_at = datetime.utcnow()

    db.commit()
    db.refresh(prefs)

    return {
        "message": "Preferences updated successfully",
        "timezone": prefs.timezone,
        "theme": prefs.theme,
        "auto_refresh_interval": prefs.auto_refresh_interval,
        "date_format": prefs.date_format,
        "table_rows_per_page": prefs.table_rows_per_page,
        "calibration_interval_days": prefs.calibration_interval_days,
        "calibration_warning_days": prefs.calibration_warning_days,
        "status_ok_threshold_hours": prefs.status_ok_threshold_hours,
        "status_pending_threshold_hours": prefs.status_pending_threshold_hours,
        "updated_at": prefs.updated_at.isoformat() if prefs.updated_at else None
    }

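
# Illustrative sketch (never called by the app): because
# update_preferences() applies updates.dict(exclude_unset=True), a PUT
# carrying only some fields leaves every other preference at its stored
# value. The base URL is an assumption.
def _example_partial_preferences_update():
    import requests  # assumed available

    resp = requests.put(
        "http://localhost:8000/api/preferences",
        json={"theme": "dark", "auto_refresh_interval": 60},
    )
    body = resp.json()
    print(body["theme"])     # "dark"
    print(body["timezone"])  # unchanged stored value
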
# Database Management Endpoints

backup_service = DatabaseBackupService()


@router.get("/database/stats")
def get_database_stats():
    """Get current database statistics"""
    try:
        stats = backup_service.get_database_stats()
        return stats
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get database stats: {str(e)}")


@router.post("/database/snapshot")
def create_database_snapshot(description: Optional[str] = None):
    """Create a full database snapshot"""
    try:
        snapshot = backup_service.create_snapshot(description=description)
        return {
            "message": "Snapshot created successfully",
            "snapshot": snapshot
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Snapshot creation failed: {str(e)}")


@router.get("/database/snapshots")
def list_database_snapshots():
    """List all available database snapshots"""
    try:
        snapshots = backup_service.list_snapshots()
        return {
            "snapshots": snapshots,
            "count": len(snapshots)
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to list snapshots: {str(e)}")


@router.get("/database/snapshot/{filename}")
def download_snapshot(filename: str):
    """Download a specific snapshot file"""
    try:
        snapshot_path = backup_service.download_snapshot(filename)
        return FileResponse(
            path=str(snapshot_path),
            filename=filename,
            media_type="application/x-sqlite3"
        )
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Download failed: {str(e)}")


@router.delete("/database/snapshot/{filename}")
def delete_database_snapshot(filename: str):
    """Delete a specific snapshot"""
    try:
        backup_service.delete_snapshot(filename)
        return {
            "message": f"Snapshot {filename} deleted successfully",
            "filename": filename
        }
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Delete failed: {str(e)}")

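
# Illustrative sketch (never called by the app): saving a snapshot
# served by the download endpoint above. The base URL and the example
# filename are assumptions.
def _example_download_snapshot(filename: str = "snapshot_example.db"):
    import requests  # assumed available

    resp = requests.get(f"http://localhost:8000/api/database/snapshot/{filename}")
    resp.raise_for_status()  # raises on the 404 returned for unknown snapshots
    with open(filename, "wb") as f:
        f.write(resp.content)
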
class RestoreRequest(BaseModel):
    """Schema for restore request"""
    filename: str
    create_backup: bool = True


@router.post("/database/restore")
def restore_database(request: RestoreRequest, db: Session = Depends(get_db)):
    """Restore database from a snapshot"""
    try:
        # Close the database connection before restoring
        db.close()

        result = backup_service.restore_snapshot(
            filename=request.filename,
            create_backup_before_restore=request.create_backup
        )

        return result
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Snapshot {request.filename} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Restore failed: {str(e)}")

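
# Illustrative sketch (never called by the app): a snapshot-then-restore
# round trip against the endpoints above. The base URL is an assumption;
# the "filename" key on each listed snapshot and the newest-first
# ordering are inferred, not confirmed by this excerpt.
def _example_snapshot_and_restore():
    import requests  # assumed available

    base = "http://localhost:8000/api/database"
    requests.post(base + "/snapshot", params={"description": "pre-migration"})
    snapshots = requests.get(base + "/snapshots").json()["snapshots"]
    latest = snapshots[0]["filename"]  # assumes newest-first ordering
    # create_backup=True asks restore_snapshot() to back up the current
    # database before overwriting it with the chosen snapshot.
    resp = requests.post(
        base + "/restore",
        json={"filename": latest, "create_backup": True},
    )
    print(resp.json())
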
@router.post("/database/upload-snapshot")
|
||||
async def upload_snapshot(file: UploadFile = File(...)):
|
||||
"""Upload a snapshot file to the backups directory"""
|
||||
if not file.filename.endswith('.db'):
|
||||
raise HTTPException(status_code=400, detail="File must be a .db file")
|
||||
|
||||
try:
|
||||
# Save uploaded file to backups directory
|
||||
backups_dir = Path("./data/backups")
|
||||
backups_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
|
||||
uploaded_filename = f"snapshot_uploaded_{timestamp}.db"
|
||||
file_path = backups_dir / uploaded_filename
|
||||
|
||||
# Save file
|
||||
with open(file_path, "wb") as buffer:
|
||||
shutil.copyfileobj(file.file, buffer)
|
||||
|
||||
# Create metadata
|
||||
metadata = {
|
||||
"filename": uploaded_filename,
|
||||
"created_at": timestamp,
|
||||
"created_at_iso": datetime.utcnow().isoformat(),
|
||||
"description": f"Uploaded: {file.filename}",
|
||||
"size_bytes": file_path.stat().st_size,
|
||||
"size_mb": round(file_path.stat().st_size / (1024 * 1024), 2),
|
||||
"type": "uploaded"
|
||||
}
|
||||
|
||||
metadata_path = backups_dir / f"{uploaded_filename}.meta.json"
|
||||
import json
|
||||
with open(metadata_path, 'w') as f:
|
||||
json.dump(metadata, f, indent=2)
|
||||
|
||||
return {
|
||||
"message": "Snapshot uploaded successfully",
|
||||
"snapshot": metadata
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
|
||||
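
# Illustrative sketch (never called by the app): uploading an externally
# produced SQLite file as a snapshot. Only the ".db" suffix is validated
# server-side; the base URL and local file path are assumptions.
def _example_upload_snapshot():
    import requests  # assumed available

    with open("local_copy.db", "rb") as f:
        resp = requests.post(
            "http://localhost:8000/api/database/upload-snapshot",
            files={"file": ("local_copy.db", f, "application/x-sqlite3")},
        )
    # The response echoes the metadata stored next to the file, e.g.
    # {"snapshot": {"filename": "snapshot_uploaded_<timestamp>.db", ...}}
    print(resp.json()["snapshot"]["filename"])
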
44
app/seismo/routers/units.py
Normal file
@@ -0,0 +1,44 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from datetime import datetime
from typing import Dict, Any

from backend.database import get_db
from backend.services.snapshot import emit_status_snapshot

router = APIRouter(prefix="/api", tags=["units"])


@router.get("/unit/{unit_id}")
def get_unit_detail(unit_id: str, db: Session = Depends(get_db)):
    """
    Returns detailed data for a single unit.
    """
    snapshot = emit_status_snapshot()

    if unit_id not in snapshot["units"]:
        raise HTTPException(status_code=404, detail=f"Unit {unit_id} not found")

    unit_data = snapshot["units"][unit_id]

    # Mock coordinates for now (will be replaced with real data)
    mock_coords = {
        "BE1234": {"lat": 37.7749, "lon": -122.4194, "location": "San Francisco, CA"},
        "BE5678": {"lat": 34.0522, "lon": -118.2437, "location": "Los Angeles, CA"},
        "BE9012": {"lat": 40.7128, "lon": -74.0060, "location": "New York, NY"},
        "BE3456": {"lat": 41.8781, "lon": -87.6298, "location": "Chicago, IL"},
        "BE7890": {"lat": 29.7604, "lon": -95.3698, "location": "Houston, TX"},
    }

    coords = mock_coords.get(unit_id, {"lat": 39.8283, "lon": -98.5795, "location": "Unknown"})

    return {
        "id": unit_id,
        "status": unit_data["status"],
        "age": unit_data["age"],
        "last_seen": unit_data["last"],
        "last_file": unit_data.get("fname", ""),
        "deployed": unit_data["deployed"],
        "note": unit_data.get("note", ""),
        "coordinates": coords
    }
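
# Illustrative sketch (never called by the app): fetching a single unit.
# Unknown IDs return 404, and units without a mocked coordinate entry
# fall back to the geographic center of the contiguous US. The base URL
# is an assumption.
def _example_get_unit_detail():
    import requests  # assumed available

    resp = requests.get("http://localhost:8000/api/unit/BE1234")
    if resp.status_code == 404:
        print("unit not found")
    else:
        print(resp.json()["coordinates"])  # {"lat": 37.7749, "lon": -122.4194, ...}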