feat: Manual sound data uploads; standalone SLM type added (no modem mode); smart uploading with fuzzy name matching enabled.
This commit is contained in:
@@ -364,6 +364,15 @@ async def nrl_detail_page(
|
||||
)
|
||||
).first()
|
||||
|
||||
# Parse connection_mode from location_metadata JSON
|
||||
import json as _json
|
||||
connection_mode = "connected"
|
||||
try:
|
||||
meta = _json.loads(location.location_metadata or "{}")
|
||||
connection_mode = meta.get("connection_mode", "connected")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
template = "vibration_location_detail.html" if location.location_type == "vibration" else "nrl_detail.html"
|
||||
return templates.TemplateResponse(template, {
|
||||
"request": request,
|
||||
@@ -376,6 +385,7 @@ async def nrl_detail_page(
|
||||
"session_count": session_count,
|
||||
"file_count": file_count,
|
||||
"active_session": active_session,
|
||||
"connection_mode": connection_mode,
|
||||
})
|
||||
|
||||
|
||||
|
||||
@@ -736,3 +736,67 @@ async def upload_nrl_data(
|
||||
"started_at": started_at.isoformat() if started_at else None,
|
||||
"stopped_at": stopped_at.isoformat() if stopped_at else None,
|
||||
}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# NRL Live Status (connected NRLs only)
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/nrl/{location_id}/live-status", response_class=HTMLResponse)
async def get_nrl_live_status(
    project_id: str,
    location_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Render a compact live-status card for the unit assigned to this NRL.

    Resolves the active ``UnitAssignment`` for ``location_id``, then queries
    the SLMM service (base URL from the ``SLMM_BASE_URL`` env var, default
    ``http://localhost:8100``) for that unit's cached NL43 status. Used in
    the NRL overview tab for connected NRLs.

    Degrades gracefully: always renders the same partial, with
    ``status=None`` and a human-readable ``error`` when no unit is assigned,
    the unit row is missing, SLMM returns a non-200, or SLMM is unreachable.
    """
    import os
    import httpx

    # Find the unit currently assigned to this location (active assignments only).
    assignment = db.query(UnitAssignment).filter(
        and_(
            UnitAssignment.location_id == location_id,
            UnitAssignment.status == "active",
        )
    ).first()

    if not assignment:
        return templates.TemplateResponse("partials/projects/nrl_live_status.html", {
            "request": request,
            "status": None,
            "error": "No unit assigned",
        })

    unit = db.query(RosterUnit).filter_by(id=assignment.unit_id).first()
    if not unit:
        # Assignment row points at a unit that no longer exists in the roster.
        return templates.TemplateResponse("partials/projects/nrl_live_status.html", {
            "request": request,
            "status": None,
            "error": "Assigned unit not found",
        })

    slmm_base = os.getenv("SLMM_BASE_URL", "http://localhost:8100")
    status_data = None
    error_msg = None

    try:
        # Short timeout: this renders inline in the overview tab, so a slow
        # SLMM must not stall the page.
        async with httpx.AsyncClient(timeout=5.0) as client:
            resp = await client.get(f"{slmm_base}/api/nl43/{unit.id}/status")
            if resp.status_code == 200:
                status_data = resp.json()
            else:
                error_msg = f"SLMM returned {resp.status_code}"
    except httpx.HTTPError:
        # Connection refused / timeout / DNS failure — treat SLMM as offline.
        error_msg = "SLMM unreachable"
    except Exception:
        # Defensive catch-all (e.g. malformed JSON body) — never break the page.
        error_msg = "SLMM unreachable"

    return templates.TemplateResponse("partials/projects/nrl_live_status.html", {
        "request": request,
        "unit": unit,
        "status": status_data,
        "error": error_msg,
    })
|
||||
|
||||
@@ -23,12 +23,18 @@ import io
|
||||
from backend.utils.timezone import utc_to_local, format_local_datetime
|
||||
|
||||
from backend.database import get_db
|
||||
from fastapi import UploadFile, File
|
||||
import zipfile
|
||||
import hashlib
|
||||
import pathlib as _pathlib
|
||||
|
||||
from backend.models import (
|
||||
Project,
|
||||
ProjectType,
|
||||
MonitoringLocation,
|
||||
UnitAssignment,
|
||||
MonitoringSession,
|
||||
DataFile,
|
||||
ScheduledAction,
|
||||
RecurringSchedule,
|
||||
RosterUnit,
|
||||
@@ -2697,6 +2703,301 @@ async def generate_combined_excel_report(
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Project-level bulk upload (entire date-folder structure)
|
||||
# ============================================================================
|
||||
|
||||
def _bulk_parse_rnh(content: bytes) -> dict:
|
||||
"""Parse a Rion .rnh metadata file for session start/stop times and device info."""
|
||||
result = {}
|
||||
try:
|
||||
text = content.decode("utf-8", errors="replace")
|
||||
for line in text.splitlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("["):
|
||||
continue
|
||||
if "," in line:
|
||||
key, _, value = line.partition(",")
|
||||
key = key.strip()
|
||||
value = value.strip()
|
||||
mapping = {
|
||||
"Serial Number": "serial_number",
|
||||
"Store Name": "store_name",
|
||||
"Index Number": "index_number",
|
||||
"Measurement Start Time": "start_time_str",
|
||||
"Measurement Stop Time": "stop_time_str",
|
||||
"Total Measurement Time": "total_time_str",
|
||||
}
|
||||
if key in mapping:
|
||||
result[mapping[key]] = value
|
||||
except Exception:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
def _bulk_parse_datetime(s: str):
|
||||
if not s:
|
||||
return None
|
||||
try:
|
||||
return datetime.strptime(s.strip(), "%Y/%m/%d %H:%M:%S")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _bulk_classify_file(filename: str) -> str:
|
||||
name = filename.lower()
|
||||
if name.endswith(".rnh"):
|
||||
return "log"
|
||||
if name.endswith(".rnd"):
|
||||
return "measurement"
|
||||
if name.endswith(".mp3") or name.endswith(".wav") or name.endswith(".m4a"):
|
||||
return "audio"
|
||||
if name.endswith(".xlsx") or name.endswith(".xls") or name.endswith(".csv"):
|
||||
return "data"
|
||||
return "data"
|
||||
|
||||
|
||||
# Files we skip entirely — already-converted outputs that don't need re-importing.
# Compared against each uploaded file's lowercased suffix during bulk upload;
# matching files are dropped before session grouping.
_BULK_SKIP_EXTENSIONS = {".xlsx", ".xls"}
|
||||
|
||||
|
||||
@router.post("/{project_id}/upload-all")
async def upload_all_project_data(
    project_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Bulk-import an entire structured data folder selected via webkitdirectory.

    Expected folder structure (flexible depth):
        [date_folder]/[NRL_name]/[Auto_####]/   ← files here
        -- OR --
        [NRL_name]/[Auto_####]/                  ← files here (no date wrapper)
        -- OR --
        [date_folder]/[NRL_name]/                ← files directly in NRL folder

    Each leaf folder group of .rnd/.rnh files becomes one MonitoringSession.
    NRL folder names are matched case-insensitively to MonitoringLocation.name.
    .mp3 files are stored as audio. .xlsx/.xls are skipped (already-converted).
    Unmatched folders are reported but don't cause failure.

    Raises:
        HTTPException 400 if no files were received.
        HTTPException 404 if the project does not exist.

    Returns a JSON summary: sessions created, files imported, per-session
    details, and the list of folder paths that matched no location.
    """
    form = await request.form()

    # Collect (relative_path, filename, bytes) for every uploaded file.
    # The JS sends each file as "files" and its webkitRelativePath as "paths".
    from collections import defaultdict

    uploaded_files = form.getlist("files")
    uploaded_paths = form.getlist("paths")

    if not uploaded_files:
        raise HTTPException(status_code=400, detail="No files received.")

    if len(uploaded_paths) != len(uploaded_files):
        # Fallback: use bare filename if paths weren't sent
        uploaded_paths = [f.filename for f in uploaded_files]

    project = db.query(Project).filter_by(id=project_id).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    # Load all sound monitoring locations for this project
    locations = db.query(MonitoringLocation).filter_by(
        project_id=project_id,
        location_type="sound",
    ).all()

    # Build a case-insensitive name → location map
    loc_by_name: dict[str, MonitoringLocation] = {
        loc.name.strip().lower(): loc for loc in locations
    }

    def _normalize(s: str) -> str:
        """Lowercase, strip spaces/hyphens/underscores for fuzzy comparison."""
        return s.lower().replace(" ", "").replace("-", "").replace("_", "")

    # Pre-build normalized keys for fuzzy matching
    loc_by_normalized: dict[str, MonitoringLocation] = {
        _normalize(loc.name): loc for loc in locations
    }

    def _find_location_for_path(path: str):
        """
        Walk path components from right and return first matching location.
        Tries exact match first, then normalized (strips spaces/hyphens/underscores),
        then checks if the location name *starts with* the normalized folder name.
        e.g. folder "NRL 1" matches location "NRL1 - Test Location"
        """
        components = path.replace("\\", "/").split("/")
        for comp in reversed(components):
            # Exact match
            key = comp.strip().lower()
            if key in loc_by_name:
                return loc_by_name[key]
            # Normalized match ("NRL 1" == "NRL1")
            norm = _normalize(comp)
            if norm in loc_by_normalized:
                return loc_by_normalized[norm]
            # Prefix match: location name starts with the folder component
            # e.g. "NRL1" matches "NRL1 - Test Location"
            # NOTE(review): this also matches when the *folder* name merely
            # starts with a location's normalized name — ambiguous folders
            # resolve to the first dict entry that happens to match.
            for loc_norm, loc in loc_by_normalized.items():
                if loc_norm.startswith(norm) or norm.startswith(loc_norm):
                    return loc
        return None

    def _session_group_key(parts: tuple) -> str:
        """
        Determine the grouping key for a file path.
        Files inside Auto_####/Auto_Leq/ or Auto_####/Auto_Lp_01/ are collapsed
        up to their Auto_#### parent so they all land in the same session.
        """
        # Find the deepest Auto_#### component (case-insensitive)
        # (loop keeps overwriting auto_idx, so the last/deepest match wins;
        # Auto_Leq / Auto_Lp_* subfolders are explicitly excluded).
        auto_idx = None
        for i, p in enumerate(parts):
            if p.lower().startswith("auto_") and not p.lower().startswith("auto_leq") and not p.lower().startswith("auto_lp"):
                auto_idx = i
        if auto_idx is not None:
            # Group key = everything up to and including Auto_####
            return "/".join(parts[:auto_idx + 1])
        # Fallback: use the immediate parent folder
        return "/".join(parts[:-1]) if len(parts) > 1 else ""

    # --- Group files by session key ---
    groups: dict[str, list[tuple[str, bytes]]] = defaultdict(list)

    for rel_path, uf in zip(uploaded_paths, uploaded_files):
        rel_path = rel_path.replace("\\", "/").strip("/")
        parts = _pathlib.PurePosixPath(rel_path).parts
        if not parts:
            continue
        fname = parts[-1]
        # Skip already-converted Excel exports
        if _pathlib.PurePosixPath(fname).suffix.lower() in _BULK_SKIP_EXTENSIONS:
            continue
        group_key = _session_group_key(parts)
        # Whole file is read into memory here — assumes uploads are modest in
        # size (sound-level data files). TODO confirm for large audio batches.
        data = await uf.read()
        groups[group_key].append((fname, data))

    # Aggregate by (location_id, date_label) so each Auto_#### group is one session
    # key: (location_id or None, group_path)
    session_results = []
    unmatched_paths = []
    total_files = 0
    total_sessions = 0

    for group_path, file_list in sorted(groups.items()):
        matched_loc = _find_location_for_path(group_path)

        if matched_loc is None:
            # Reported back to the client; does not abort the import.
            unmatched_paths.append(group_path)
            continue

        # Parse .rnh if present in this group (first one found wins)
        rnh_meta = {}
        for fname, fbytes in file_list:
            if fname.lower().endswith(".rnh"):
                rnh_meta = _bulk_parse_rnh(fbytes)
                break

        # Fall back to "now" when the header has no parseable start time.
        started_at = _bulk_parse_datetime(rnh_meta.get("start_time_str")) or datetime.utcnow()
        stopped_at = _bulk_parse_datetime(rnh_meta.get("stop_time_str"))
        duration_seconds = None
        if started_at and stopped_at:
            duration_seconds = int((stopped_at - started_at).total_seconds())

        store_name = rnh_meta.get("store_name", "")
        serial_number = rnh_meta.get("serial_number", "")
        index_number = rnh_meta.get("index_number", "")

        session_id = str(uuid.uuid4())
        monitoring_session = MonitoringSession(
            id=session_id,
            project_id=project_id,
            location_id=matched_loc.id,
            unit_id=None,  # manual upload — no roster unit involved
            session_type="sound",
            started_at=started_at,
            stopped_at=stopped_at,
            duration_seconds=duration_seconds,
            status="completed",
            session_metadata=json.dumps({
                "source": "bulk_upload",
                "group_path": group_path,
                "store_name": store_name,
                "serial_number": serial_number,
                "index_number": index_number,
            }),
        )
        db.add(monitoring_session)
        # Commit per group so session_id is durable before files are written.
        db.commit()
        db.refresh(monitoring_session)

        # Write files
        output_dir = _pathlib.Path("data/Projects") / project_id / session_id
        output_dir.mkdir(parents=True, exist_ok=True)

        leq_count = 0
        lp_count = 0
        group_file_count = 0

        for fname, fbytes in file_list:
            file_type = _bulk_classify_file(fname)
            fname_lower = fname.lower()
            if fname_lower.endswith(".rnd"):
                if "_leq_" in fname_lower:
                    leq_count += 1
                elif "_lp" in fname_lower:
                    lp_count += 1

            # NOTE(review): files from different subfolders of the same
            # Auto_#### group land flat in output_dir — identical filenames
            # would overwrite each other. Confirm Rion naming is unique.
            dest = output_dir / fname
            dest.write_bytes(fbytes)
            checksum = hashlib.sha256(fbytes).hexdigest()
            rel_path = str(dest.relative_to("data"))

            data_file = DataFile(
                id=str(uuid.uuid4()),
                session_id=session_id,
                file_path=rel_path,
                file_type=file_type,
                file_size_bytes=len(fbytes),
                downloaded_at=datetime.utcnow(),
                checksum=checksum,
                file_metadata=json.dumps({
                    "source": "bulk_upload",
                    "original_filename": fname,
                    "group_path": group_path,
                    "store_name": store_name,
                }),
            )
            db.add(data_file)
            group_file_count += 1

        db.commit()
        total_files += group_file_count
        total_sessions += 1

        session_results.append({
            "location_name": matched_loc.name,
            "location_id": matched_loc.id,
            "session_id": session_id,
            "group_path": group_path,
            "files": group_file_count,
            "leq_files": leq_count,
            "lp_files": lp_count,
            "store_name": store_name,
            "started_at": started_at.isoformat() if started_at else None,
        })

    return {
        "success": True,
        "sessions_created": total_sessions,
        "files_imported": total_files,
        "unmatched_folders": unmatched_paths,
        "sessions": session_results,
    }
|
||||
|
||||
|
||||
@router.get("/types/list", response_class=HTMLResponse)
|
||||
async def get_project_types(request: Request, db: Session = Depends(get_db)):
|
||||
"""
|
||||
|
||||
Reference in New Issue
Block a user