feat: add manual SD card data upload for offline NRLs; rename RecordingSession to MonitoringSession
- Add POST /api/projects/{project_id}/nrl/{location_id}/upload-data endpoint
accepting a ZIP or multi-file select of .rnd/.rnh files from an SD card.
Parses .rnh metadata for session start/stop times, serial number, and store
name. Creates a MonitoringSession (no unit assignment required) and DataFile
records for each measurement file.
- Add Upload Data button and collapsible upload panel to the NRL detail Data
Files tab, with inline success/error feedback and automatic file list refresh
via HTMX after import.
- Rename RecordingSession -> MonitoringSession throughout the codebase
(models.py, projects.py, project_locations.py, scheduler.py, roster_rename.py,
main.py, init_projects_db.py, scripts/rename_unit.py). DB table renamed from
recording_sessions to monitoring_sessions; old indexes dropped and recreated.
- Update all template UI copy from Recording Sessions to Monitoring Sessions
(nrl_detail, projects/detail, session_list, schedule_oneoff, roster).
- Add backend/migrate_rename_recording_to_monitoring_sessions.py for applying
the table rename on production databases before deploying this build.
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -14,6 +14,12 @@ from typing import Optional
|
||||
import uuid
|
||||
import json
|
||||
|
||||
from fastapi import UploadFile, File
|
||||
import zipfile
|
||||
import hashlib
|
||||
import io
|
||||
from pathlib import Path
|
||||
|
||||
from backend.database import get_db
|
||||
from backend.models import (
|
||||
Project,
|
||||
@@ -21,7 +27,8 @@ from backend.models import (
|
||||
MonitoringLocation,
|
||||
UnitAssignment,
|
||||
RosterUnit,
|
||||
RecordingSession,
|
||||
MonitoringSession,
|
||||
DataFile,
|
||||
)
|
||||
from backend.templates_config import templates
|
||||
|
||||
@@ -70,8 +77,8 @@ async def get_project_locations(
|
||||
if assignment:
|
||||
assigned_unit = db.query(RosterUnit).filter_by(id=assignment.unit_id).first()
|
||||
|
||||
# Count recording sessions
|
||||
session_count = db.query(RecordingSession).filter_by(
|
||||
# Count monitoring sessions
|
||||
session_count = db.query(MonitoringSession).filter_by(
|
||||
location_id=location.id
|
||||
).count()
|
||||
|
||||
@@ -370,19 +377,19 @@ async def unassign_unit(
|
||||
if not assignment:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
# Check if there are active recording sessions
|
||||
active_sessions = db.query(RecordingSession).filter(
|
||||
# Check if there are active monitoring sessions
|
||||
active_sessions = db.query(MonitoringSession).filter(
|
||||
and_(
|
||||
RecordingSession.location_id == assignment.location_id,
|
||||
RecordingSession.unit_id == assignment.unit_id,
|
||||
RecordingSession.status == "recording",
|
||||
MonitoringSession.location_id == assignment.location_id,
|
||||
MonitoringSession.unit_id == assignment.unit_id,
|
||||
MonitoringSession.status == "recording",
|
||||
)
|
||||
).count()
|
||||
|
||||
if active_sessions > 0:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Cannot unassign unit with active recording sessions. Stop recording first.",
|
||||
detail="Cannot unassign unit with active monitoring sessions. Stop monitoring first.",
|
||||
)
|
||||
|
||||
assignment.status = "completed"
|
||||
@@ -451,14 +458,12 @@ async def get_nrl_sessions(
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
Get recording sessions for a specific NRL.
|
||||
Get monitoring sessions for a specific NRL.
|
||||
Returns HTML partial with session list.
|
||||
"""
|
||||
from backend.models import RecordingSession, RosterUnit
|
||||
|
||||
sessions = db.query(RecordingSession).filter_by(
|
||||
sessions = db.query(MonitoringSession).filter_by(
|
||||
location_id=location_id
|
||||
).order_by(RecordingSession.started_at.desc()).all()
|
||||
).order_by(MonitoringSession.started_at.desc()).all()
|
||||
|
||||
# Enrich with unit details
|
||||
sessions_data = []
|
||||
@@ -491,14 +496,12 @@ async def get_nrl_files(
|
||||
Get data files for a specific NRL.
|
||||
Returns HTML partial with file list.
|
||||
"""
|
||||
from backend.models import DataFile, RecordingSession
|
||||
|
||||
# Join DataFile with RecordingSession to filter by location_id
|
||||
# Join DataFile with MonitoringSession to filter by location_id
|
||||
files = db.query(DataFile).join(
|
||||
RecordingSession,
|
||||
DataFile.session_id == RecordingSession.id
|
||||
MonitoringSession,
|
||||
DataFile.session_id == MonitoringSession.id
|
||||
).filter(
|
||||
RecordingSession.location_id == location_id
|
||||
MonitoringSession.location_id == location_id
|
||||
).order_by(DataFile.created_at.desc()).all()
|
||||
|
||||
# Enrich with session details
|
||||
@@ -506,7 +509,7 @@ async def get_nrl_files(
|
||||
for file in files:
|
||||
session = None
|
||||
if file.session_id:
|
||||
session = db.query(RecordingSession).filter_by(id=file.session_id).first()
|
||||
session = db.query(MonitoringSession).filter_by(id=file.session_id).first()
|
||||
|
||||
files_data.append({
|
||||
"file": file,
|
||||
@@ -519,3 +522,217 @@ async def get_nrl_files(
|
||||
"location_id": location_id,
|
||||
"files": files_data,
|
||||
})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Manual SD Card Data Upload
|
||||
# ============================================================================
|
||||
|
||||
def _parse_rnh(content: bytes) -> dict:
|
||||
"""
|
||||
Parse a Rion .rnh metadata file (INI-style with [Section] headers).
|
||||
Returns a dict of key metadata fields.
|
||||
"""
|
||||
result = {}
|
||||
try:
|
||||
text = content.decode("utf-8", errors="replace")
|
||||
for line in text.splitlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("["):
|
||||
continue
|
||||
if "," in line:
|
||||
key, _, value = line.partition(",")
|
||||
key = key.strip()
|
||||
value = value.strip()
|
||||
if key == "Serial Number":
|
||||
result["serial_number"] = value
|
||||
elif key == "Store Name":
|
||||
result["store_name"] = value
|
||||
elif key == "Index Number":
|
||||
result["index_number"] = value
|
||||
elif key == "Measurement Start Time":
|
||||
result["start_time_str"] = value
|
||||
elif key == "Measurement Stop Time":
|
||||
result["stop_time_str"] = value
|
||||
elif key == "Total Measurement Time":
|
||||
result["total_time_str"] = value
|
||||
except Exception:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
def _parse_rnh_datetime(s: str):
|
||||
"""Parse RNH datetime string: '2026/02/17 19:00:19' -> datetime"""
|
||||
from datetime import datetime
|
||||
if not s:
|
||||
return None
|
||||
try:
|
||||
return datetime.strptime(s.strip(), "%Y/%m/%d %H:%M:%S")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _classify_file(filename: str) -> str:
|
||||
"""Classify a file by name into a DataFile file_type."""
|
||||
name = filename.lower()
|
||||
if name.endswith(".rnh"):
|
||||
return "log"
|
||||
if name.endswith(".rnd"):
|
||||
return "measurement"
|
||||
if name.endswith(".zip"):
|
||||
return "archive"
|
||||
return "data"
|
||||
|
||||
|
||||
@router.post("/nrl/{location_id}/upload-data")
async def upload_nrl_data(
    project_id: str,
    location_id: str,
    db: Session = Depends(get_db),
    files: list[UploadFile] = File(...),
):
    """
    Manually upload SD card data for an offline NRL.

    Accepts either:
    - A single .zip file (the Auto_#### folder zipped) — auto-extracted
    - Multiple .rnd / .rnh files selected directly from the SD card folder

    Creates a MonitoringSession from .rnh metadata and DataFile records
    for each measurement file. No unit assignment required.

    Raises:
        HTTPException 404: location does not exist in this project.
        HTTPException 400: invalid ZIP archive, or no usable files found.
    """
    from datetime import datetime

    # Verify project and location exist
    location = db.query(MonitoringLocation).filter_by(
        id=location_id, project_id=project_id
    ).first()
    if not location:
        raise HTTPException(status_code=404, detail="Location not found")

    # --- Step 1: Normalize to (filename, bytes) list ---
    # SECURITY: every filename is reduced to its basename before being used
    # as a disk path component. Previously only the ZIP branch did this; a
    # crafted multi-file upload name like "../../etc/x" could escape the
    # session directory (path traversal).
    file_entries: list[tuple[str, bytes]] = []

    # UploadFile.filename may be None per FastAPI; treat it as empty.
    if len(files) == 1 and (files[0].filename or "").lower().endswith(".zip"):
        raw = await files[0].read()
        try:
            with zipfile.ZipFile(io.BytesIO(raw)) as zf:
                for info in zf.infolist():
                    if info.is_dir():
                        continue
                    name = Path(info.filename).name  # strip folder path
                    if not name:
                        continue
                    file_entries.append((name, zf.read(info)))
        except zipfile.BadZipFile:
            raise HTTPException(status_code=400, detail="Uploaded file is not a valid ZIP archive.")
    else:
        for uf in files:
            name = Path(uf.filename or "").name  # strip any client-supplied path
            if not name:
                continue
            data = await uf.read()
            file_entries.append((name, data))

    if not file_entries:
        raise HTTPException(status_code=400, detail="No usable files found in upload.")

    # --- Step 2: Find and parse .rnh metadata ---
    # The first .rnh found supplies session timing and instrument identity.
    rnh_meta = {}
    for fname, fbytes in file_entries:
        if fname.lower().endswith(".rnh"):
            rnh_meta = _parse_rnh(fbytes)
            break

    # Fall back to "now" when the header is missing or unparseable.
    started_at = _parse_rnh_datetime(rnh_meta.get("start_time_str")) or datetime.utcnow()
    stopped_at = _parse_rnh_datetime(rnh_meta.get("stop_time_str"))
    duration_seconds = None
    if started_at and stopped_at:
        duration_seconds = int((stopped_at - started_at).total_seconds())

    store_name = rnh_meta.get("store_name", "")
    serial_number = rnh_meta.get("serial_number", "")
    index_number = rnh_meta.get("index_number", "")

    # --- Step 3: Create MonitoringSession ---
    session_id = str(uuid.uuid4())
    monitoring_session = MonitoringSession(
        id=session_id,
        project_id=project_id,
        location_id=location_id,
        unit_id=None,  # manual import: no unit assignment required
        session_type="sound",
        started_at=started_at,
        stopped_at=stopped_at,
        duration_seconds=duration_seconds,
        status="completed",
        session_metadata=json.dumps({
            "source": "manual_upload",
            "store_name": store_name,
            "serial_number": serial_number,
            "index_number": index_number,
        }),
    )
    db.add(monitoring_session)
    db.commit()
    db.refresh(monitoring_session)

    # --- Step 4: Write files to disk and create DataFile records ---
    output_dir = Path("data/Projects") / project_id / session_id
    output_dir.mkdir(parents=True, exist_ok=True)

    leq_count = 0
    lp_count = 0
    metadata_count = 0
    files_imported = 0

    for fname, fbytes in file_entries:
        file_type = _classify_file(fname)
        fname_lower = fname.lower()

        # Track counts for the summary returned to the UI.
        if fname_lower.endswith(".rnd"):
            if "_leq_" in fname_lower:
                leq_count += 1
            elif "_lp" in fname_lower:
                lp_count += 1
        elif fname_lower.endswith(".rnh"):
            metadata_count += 1

        # Write to disk (fname is a sanitized basename — see Step 1)
        dest = output_dir / fname
        dest.write_bytes(fbytes)

        # Compute checksum for later integrity verification
        checksum = hashlib.sha256(fbytes).hexdigest()

        # Store relative path from data/ dir
        rel_path = str(dest.relative_to("data"))

        data_file = DataFile(
            id=str(uuid.uuid4()),
            session_id=session_id,
            file_path=rel_path,
            file_type=file_type,
            file_size_bytes=len(fbytes),
            downloaded_at=datetime.utcnow(),
            checksum=checksum,
            file_metadata=json.dumps({
                "source": "manual_upload",
                "original_filename": fname,
                "store_name": store_name,
            }),
        )
        db.add(data_file)
        files_imported += 1

    db.commit()

    return {
        "success": True,
        "session_id": session_id,
        "files_imported": files_imported,
        "leq_files": leq_count,
        "lp_files": lp_count,
        "metadata_files": metadata_count,
        "store_name": store_name,
        "started_at": started_at.isoformat() if started_at else None,
        "stopped_at": stopped_at.isoformat() if stopped_at else None,
    }
|
||||
|
||||
Reference in New Issue
Block a user