diff --git a/CHANGELOG.md b/CHANGELOG.md index afc34f5..97335e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,33 @@ All notable changes to Terra-View will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.9.3] - 2026-03-28 + +### Added +- **Monitoring Session Detail Page**: New dedicated page for each session showing session info, data files (with View/Report/Download actions), an editable session panel, and report actions. +- **Session Calendar with Gantt Bars**: Monthly calendar view below the session list, showing each session as a Gantt-style bar. The dim bar represents the full device on/off window; the bright bar highlights the effective recording window. Bars extend edge-to-edge across day cells for sessions spanning midnight. +- **Configurable Period Windows**: Sessions now store `period_start_hour` and `period_end_hour` to define the exact hours that count toward reports, replacing hardcoded day/night defaults. The session edit panel shows a "Required Recording Window" section with a live preview (e.g. "7:00 AM → 7:00 PM") and a Defaults button that auto-fills based on period type. +- **Report Date Field**: Sessions can now store an explicit `report_date` to override the automatic target-date heuristic — useful when a device ran across multiple days but only one specific day's data is needed for the report. +- **Effective Window on Session Info**: Session detail and session cards now show an "Effective" row displaying the computed recording window dates and times in local time. +- **Vibration Project Redesign**: Vibration project detail page is stripped back to project details and monitoring locations only. Each location supports assigning a seismograph and optional modem. Sound-specific tabs (Schedules, Sessions, Data Files, Assigned Units) are hidden for vibration projects. 
+- **Modem Assignment on Locations**: Vibration monitoring locations now support an optional paired modem alongside the seismograph. The swap endpoint handles both assignments atomically, updating bidirectional pairing fields on both units. +- **Available Modems Endpoint**: New `GET /api/projects/{project_id}/available-modems` endpoint returning all deployed, non-retired modems for use in assignment dropdowns. + +### Fixed +- **Active Assignment Checks**: Unified all `UnitAssignment` "active" checks from `status == "active"` to `assigned_until IS NULL` throughout `project_locations.py` and `projects.py` for consistency with the canonical active definition. + +### Changed +- **Sound-Only Endpoint Guards**: FTP browser, RND viewer, Excel report generation, combined report wizard, and data upload endpoints now return HTTP 400 if called on a non-sound-monitoring project. + +### Migration Notes +Run on each database before deploying: +```bash +docker compose exec terra-view python3 backend/migrate_add_session_period_hours.py +docker compose exec terra-view python3 backend/migrate_add_session_report_date.py +``` + +--- + ## [0.9.2] - 2026-03-27 ### Added diff --git a/README.md b/README.md index 93865ae..d5c7557 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Terra-View v0.9.2 +# Terra-View v0.9.3 Backend API and HTMX-powered web interface for managing a mixed fleet of seismographs and field modems. Track deployments, monitor health in real time, merge roster intent with incoming telemetry, and control your fleet through a unified database and dashboard. 
## Features diff --git a/backend/main.py b/backend/main.py index 4d9cc1e..89cca81 100644 --- a/backend/main.py +++ b/backend/main.py @@ -30,7 +30,7 @@ Base.metadata.create_all(bind=engine) ENVIRONMENT = os.getenv("ENVIRONMENT", "production") # Initialize FastAPI app -VERSION = "0.9.2" +VERSION = "0.9.3" if ENVIRONMENT == "development": _build = os.getenv("BUILD_NUMBER", "0") if _build and _build != "0": @@ -355,8 +355,11 @@ async def nrl_detail_page( ).first() assigned_unit = None + assigned_modem = None if assignment: assigned_unit = db.query(RosterUnit).filter_by(id=assignment.unit_id).first() + if assigned_unit and assigned_unit.deployed_with_modem_id: + assigned_modem = db.query(RosterUnit).filter_by(id=assigned_unit.deployed_with_modem_id).first() # Get session count session_count = db.query(MonitoringSession).filter_by(location_id=location_id).count() @@ -393,6 +396,7 @@ async def nrl_detail_page( "location": location, "assignment": assignment, "assigned_unit": assigned_unit, + "assigned_modem": assigned_modem, "session_count": session_count, "file_count": file_count, "active_session": active_session, diff --git a/backend/migrate_add_session_period_hours.py b/backend/migrate_add_session_period_hours.py new file mode 100644 index 0000000..5cfb0dc --- /dev/null +++ b/backend/migrate_add_session_period_hours.py @@ -0,0 +1,42 @@ +""" +Migration: add period_start_hour and period_end_hour to monitoring_sessions. 
+ +Run once: + python backend/migrate_add_session_period_hours.py + +Or inside the container: + docker exec terra-view python3 backend/migrate_add_session_period_hours.py +""" + +import sys +import os +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from backend.database import engine +from sqlalchemy import text + +def run(): + with engine.connect() as conn: + # Check which columns already exist + result = conn.execute(text("PRAGMA table_info(monitoring_sessions)")) + existing = {row[1] for row in result} + + added = [] + for col, definition in [ + ("period_start_hour", "INTEGER"), + ("period_end_hour", "INTEGER"), + ]: + if col not in existing: + conn.execute(text(f"ALTER TABLE monitoring_sessions ADD COLUMN {col} {definition}")) + added.append(col) + else: + print(f" Column '{col}' already exists — skipping.") + + conn.commit() + + if added: + print(f" Added columns: {', '.join(added)}") + print("Migration complete.") + +if __name__ == "__main__": + run() diff --git a/backend/migrate_add_session_report_date.py b/backend/migrate_add_session_report_date.py new file mode 100644 index 0000000..3b17ac7 --- /dev/null +++ b/backend/migrate_add_session_report_date.py @@ -0,0 +1,41 @@ +""" +Migration: add report_date to monitoring_sessions. 
+ +Run once: + python backend/migrate_add_session_report_date.py + +Or inside the container: + docker exec terra-view-terra-view-1 python3 backend/migrate_add_session_report_date.py +""" + +import sys +import os +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from backend.database import engine +from sqlalchemy import text + +def run(): + with engine.connect() as conn: + # Check which columns already exist + result = conn.execute(text("PRAGMA table_info(monitoring_sessions)")) + existing = {row[1] for row in result} + + added = [] + for col, definition in [ + ("report_date", "DATE"), + ]: + if col not in existing: + conn.execute(text(f"ALTER TABLE monitoring_sessions ADD COLUMN {col} {definition}")) + added.append(col) + else: + print(f" Column '{col}' already exists — skipping.") + + conn.commit() + + if added: + print(f" Added columns: {', '.join(added)}") + print("Migration complete.") + +if __name__ == "__main__": + run() diff --git a/backend/models.py b/backend/models.py index bb0ca10..b45f12f 100644 --- a/backend/models.py +++ b/backend/models.py @@ -303,6 +303,17 @@ class MonitoringSession(Base): # weekday_day | weekday_night | weekend_day | weekend_night period_type = Column(String, nullable=True) + # Effective monitoring window (hours 0–23). Night sessions cross midnight + # (period_end_hour < period_start_hour). NULL = no filtering applied. + # e.g. Day: start=7, end=19 Night: start=19, end=7 + period_start_hour = Column(Integer, nullable=True) + period_end_hour = Column(Integer, nullable=True) + + # For day sessions: the specific calendar date to use for report filtering. + # Overrides the automatic "last date with daytime rows" heuristic. + # Null = use heuristic. 
+ report_date = Column(Date, nullable=True) + # Snapshot of device configuration at recording time session_metadata = Column(Text, nullable=True) # JSON diff --git a/backend/routers/project_locations.py b/backend/routers/project_locations.py index 44fcdd5..efa21a8 100644 --- a/backend/routers/project_locations.py +++ b/backend/routers/project_locations.py @@ -31,10 +31,24 @@ from backend.models import ( DataFile, ) from backend.templates_config import templates +from backend.utils.timezone import local_to_utc router = APIRouter(prefix="/api/projects/{project_id}", tags=["project-locations"]) +# ============================================================================ +# Shared helpers +# ============================================================================ + +def _require_sound_project(project) -> None: + """Raise 400 if the project is not a sound_monitoring project.""" + if not project or project.project_type_id != "sound_monitoring": + raise HTTPException( + status_code=400, + detail="This feature is only available for Sound Monitoring projects.", + ) + + # ============================================================================ # Session period helpers # ============================================================================ @@ -98,11 +112,11 @@ async def get_project_locations( # Enrich with assignment info locations_data = [] for location in locations: - # Get active assignment + # Get active assignment (active = assigned_until IS NULL) assignment = db.query(UnitAssignment).filter( and_( UnitAssignment.location_id == location.id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).first() @@ -258,11 +272,11 @@ async def delete_location( if not location: raise HTTPException(status_code=404, detail="Location not found") - # Check if location has active assignments + # Check if location has active assignments (active = assigned_until IS NULL) active_assignments = db.query(UnitAssignment).filter( and_( 
UnitAssignment.location_id == location_id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).count() @@ -353,18 +367,18 @@ async def assign_unit_to_location( detail=f"Unit type '{unit.device_type}' does not match location type '{location.location_type}'", ) - # Check if location already has an active assignment + # Check if location already has an active assignment (active = assigned_until IS NULL) existing_assignment = db.query(UnitAssignment).filter( and_( UnitAssignment.location_id == location_id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).first() if existing_assignment: raise HTTPException( status_code=400, - detail=f"Location already has an active unit assignment ({existing_assignment.unit_id}). Unassign first.", + detail=f"Location already has an active unit assignment ({existing_assignment.unit_id}). Use swap to replace it.", ) # Create new assignment @@ -433,10 +447,120 @@ async def unassign_unit( return {"success": True, "message": "Unit unassigned successfully"} +@router.post("/locations/{location_id}/swap") +async def swap_unit_on_location( + project_id: str, + location_id: str, + request: Request, + db: Session = Depends(get_db), +): + """ + Swap the unit assigned to a vibration monitoring location. + Ends the current active assignment (if any), creates a new one, + and optionally updates modem pairing on the seismograph. + Works for first-time assignments too (no current assignment = just create). 
+ """ + location = db.query(MonitoringLocation).filter_by( + id=location_id, + project_id=project_id, + ).first() + if not location: + raise HTTPException(status_code=404, detail="Location not found") + + form_data = await request.form() + unit_id = form_data.get("unit_id") + modem_id = form_data.get("modem_id") or None + notes = form_data.get("notes") or None + + if not unit_id: + raise HTTPException(status_code=400, detail="unit_id is required") + + # Validate new unit + unit = db.query(RosterUnit).filter_by(id=unit_id).first() + if not unit: + raise HTTPException(status_code=404, detail="Unit not found") + + expected_device_type = "slm" if location.location_type == "sound" else "seismograph" + if unit.device_type != expected_device_type: + raise HTTPException( + status_code=400, + detail=f"Unit type '{unit.device_type}' does not match location type '{location.location_type}'", + ) + + # End current active assignment if one exists (active = assigned_until IS NULL) + current = db.query(UnitAssignment).filter( + and_( + UnitAssignment.location_id == location_id, + UnitAssignment.assigned_until == None, + ) + ).first() + if current: + current.assigned_until = datetime.utcnow() + current.status = "completed" + + # Create new assignment + new_assignment = UnitAssignment( + id=str(uuid.uuid4()), + unit_id=unit_id, + location_id=location_id, + project_id=project_id, + device_type=unit.device_type, + assigned_until=None, + status="active", + notes=notes, + ) + db.add(new_assignment) + + # Update modem pairing on the seismograph if modem provided + if modem_id: + modem = db.query(RosterUnit).filter_by(id=modem_id, device_type="modem").first() + if not modem: + raise HTTPException(status_code=404, detail=f"Modem '{modem_id}' not found") + unit.deployed_with_modem_id = modem_id + modem.deployed_with_unit_id = unit_id + else: + # Clear modem pairing if not provided + unit.deployed_with_modem_id = None + + db.commit() + + return JSONResponse({ + "success": True, + 
"assignment_id": new_assignment.id, + "message": f"Unit '{unit_id}' assigned to '{location.name}'" + (f" with modem '{modem_id}'" if modem_id else ""), + }) + + # ============================================================================ # Available Units for Assignment # ============================================================================ +@router.get("/available-modems", response_class=JSONResponse) +async def get_available_modems( + project_id: str, + db: Session = Depends(get_db), +): + """ + Get all deployed, non-retired modems for the modem assignment dropdown. + """ + modems = db.query(RosterUnit).filter( + and_( + RosterUnit.device_type == "modem", + RosterUnit.deployed == True, + RosterUnit.retired == False, + ) + ).order_by(RosterUnit.id).all() + + return [ + { + "id": m.id, + "hardware_model": m.hardware_model, + "ip_address": m.ip_address, + } + for m in modems + ] + + @router.get("/available-units", response_class=JSONResponse) async def get_available_units( project_id: str, @@ -459,9 +583,9 @@ async def get_available_units( ) ).all() - # Filter out units that already have active assignments + # Filter out units that already have active assignments (active = assigned_until IS NULL) assigned_unit_ids = db.query(UnitAssignment.unit_id).filter( - UnitAssignment.status == "active" + UnitAssignment.assigned_until == None ).distinct().all() assigned_unit_ids = [uid[0] for uid in assigned_unit_ids] @@ -637,6 +761,9 @@ async def upload_nrl_data( from datetime import datetime # Verify project and location exist + project = db.query(Project).filter_by(id=project_id).first() + _require_sound_project(project) + location = db.query(MonitoringLocation).filter_by( id=location_id, project_id=project_id ).first() @@ -698,8 +825,15 @@ async def upload_nrl_data( rnh_meta = _parse_rnh(fbytes) break - started_at = _parse_rnh_datetime(rnh_meta.get("start_time_str")) or datetime.utcnow() - stopped_at = _parse_rnh_datetime(rnh_meta.get("stop_time_str")) + # RNH 
files store local time (no UTC offset). Use local values for period + # classification / label generation, then convert to UTC for DB storage so + # the local_datetime Jinja filter displays the correct time. + started_at_local = _parse_rnh_datetime(rnh_meta.get("start_time_str")) or datetime.utcnow() + stopped_at_local = _parse_rnh_datetime(rnh_meta.get("stop_time_str")) + + started_at = local_to_utc(started_at_local) + stopped_at = local_to_utc(stopped_at_local) if stopped_at_local else None + duration_seconds = None if started_at and stopped_at: duration_seconds = int((stopped_at - started_at).total_seconds()) @@ -709,8 +843,9 @@ async def upload_nrl_data( index_number = rnh_meta.get("index_number", "") # --- Step 3: Create MonitoringSession --- - period_type = _derive_period_type(started_at) if started_at else None - session_label = _build_session_label(started_at, location.name, period_type) if started_at else None + # Use local times for period/label so classification reflects the clock at the site. + period_type = _derive_period_type(started_at_local) if started_at_local else None + session_label = _build_session_label(started_at_local, location.name, period_type) if started_at_local else None session_id = str(uuid.uuid4()) monitoring_session = MonitoringSession( @@ -815,15 +950,18 @@ async def get_nrl_live_status( Fetch cached status from SLMM for the unit assigned to this NRL and return a compact HTML status card. Used in the NRL overview tab for connected NRLs. Gracefully shows an offline message if SLMM is unreachable. + Sound Monitoring projects only. 
""" import os import httpx - # Find the assigned unit + _require_sound_project(db.query(Project).filter_by(id=project_id).first()) + + # Find the assigned unit (active = assigned_until IS NULL) assignment = db.query(UnitAssignment).filter( and_( UnitAssignment.location_id == location_id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).first() diff --git a/backend/routers/projects.py b/backend/routers/projects.py index ee58eb5..43c4d95 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -20,7 +20,7 @@ import json import logging import io -from backend.utils.timezone import utc_to_local, format_local_datetime +from backend.utils.timezone import utc_to_local, format_local_datetime, local_to_utc from backend.database import get_db from fastapi import UploadFile, File @@ -45,6 +45,21 @@ router = APIRouter(prefix="/api/projects", tags=["projects"]) logger = logging.getLogger(__name__) +# ============================================================================ +# Shared helpers +# ============================================================================ + +def _require_sound_project(project: Project) -> None: + """Raise 400 if the project is not a sound_monitoring project. + Call this at the top of any endpoint that only makes sense for sound projects + (report generation, FTP browser, RND file viewer, etc.).""" + if not project or project.project_type_id != "sound_monitoring": + raise HTTPException( + status_code=400, + detail="This feature is only available for Sound Monitoring projects.", + ) + + # ============================================================================ # RND file normalization — maps AU2 (older Rion) column names to the NL-43 # equivalents so report generation and the web viewer work for both formats. 
@@ -398,11 +413,11 @@ async def get_projects_list( project_id=project.id ).scalar() - # Count assigned units + # Count assigned units (active = assigned_until IS NULL) unit_count = db.query(func.count(UnitAssignment.id)).filter( and_( UnitAssignment.project_id == project.id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).scalar() @@ -806,11 +821,11 @@ async def get_project_dashboard( # Get locations locations = db.query(MonitoringLocation).filter_by(project_id=project_id).all() - # Get assigned units with details + # Get assigned units with details (active = assigned_until IS NULL) assignments = db.query(UnitAssignment).filter( and_( UnitAssignment.project_id == project_id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).all() @@ -899,11 +914,11 @@ async def get_project_units( """ from backend.models import DataFile - # Get all assignments for this project + # Get all assignments for this project (active = assigned_until IS NULL) assignments = db.query(UnitAssignment).filter( and_( UnitAssignment.project_id == project_id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).all() @@ -1128,7 +1143,7 @@ async def get_project_sessions( sessions = query.order_by(MonitoringSession.started_at.desc()).all() - # Enrich with unit and location details + # Enrich with unit, location, and effective time window details sessions_data = [] for session in sessions: unit = None @@ -1139,10 +1154,34 @@ async def get_project_sessions( if session.location_id: location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() + # Compute "Effective: date time → date time" string when period hours are set + effective_range = None + if session.period_start_hour is not None and session.period_end_hour is not None and session.started_at: + from datetime import date as _date + local_start = utc_to_local(session.started_at) + start_day = session.report_date if session.report_date 
else local_start.date() + sh = session.period_start_hour + eh = session.period_end_hour + + def _fmt_h(h): + ampm = "AM" if h < 12 else "PM" + h12 = h % 12 or 12 + return f"{h12}:00 {ampm}" + + start_str = f"{start_day.month}/{start_day.day} {_fmt_h(sh)}" + if eh > sh: # same calendar day + end_day = start_day + else: # crosses midnight + from datetime import timedelta as _td + end_day = start_day + _td(days=1) + end_str = f"{end_day.month}/{end_day.day} {_fmt_h(eh)}" + effective_range = f"{start_str} → {end_str}" + sessions_data.append({ "session": session, "unit": unit, "location": location, + "effective_range": effective_range, }) return templates.TemplateResponse("partials/projects/session_list.html", { @@ -1152,6 +1191,173 @@ async def get_project_sessions( }) +@router.get("/{project_id}/sessions-calendar", response_class=HTMLResponse) +async def get_sessions_calendar( + project_id: str, + request: Request, + db: Session = Depends(get_db), + month: Optional[int] = Query(None), + year: Optional[int] = Query(None), +): + """ + Monthly calendar view of monitoring sessions. + Color-coded by NRL location. Returns HTML partial. 
+ """ + from calendar import monthrange + from datetime import date as _date, timedelta as _td + + # Default to current month + now_local = utc_to_local(datetime.utcnow()) + if not year: + year = now_local.year + if not month: + month = now_local.month + + # Clamp month to valid range + month = max(1, min(12, month)) + + # Load all sessions for this project + sessions = db.query(MonitoringSession).filter_by(project_id=project_id).all() + + # Build location -> color map (deterministic) + PALETTE = [ + "#f97316", "#3b82f6", "#10b981", "#8b5cf6", + "#ec4899", "#14b8a6", "#f59e0b", "#6366f1", + "#ef4444", "#84cc16", + ] + loc_ids = sorted({s.location_id for s in sessions if s.location_id}) + loc_color = {lid: PALETTE[i % len(PALETTE)] for i, lid in enumerate(loc_ids)} + + # Load location names + loc_names = {} + for lid in loc_ids: + loc = db.query(MonitoringLocation).filter_by(id=lid).first() + if loc: + loc_names[lid] = loc.name + + # Build calendar grid bounds first (needed for session spanning logic) + first_day = _date(year, month, 1) + last_day = _date(year, month, monthrange(year, month)[1]) + days_before = (first_day.isoweekday() % 7) + grid_start = first_day - _td(days=days_before) + days_after = 6 - (last_day.isoweekday() % 7) + grid_end = last_day + _td(days=days_after) + + def _period_hours(s): + """Return (start_hour, end_hour) for a session, falling back to period_type defaults.""" + psh, peh = s.period_start_hour, s.period_end_hour + if psh is None or peh is None: + if s.period_type and "night" in s.period_type: + return 19, 7 + if s.period_type and "day" in s.period_type: + return 7, 19 + return psh, peh + + # Build day -> list of gantt segments + day_sessions: dict = {} + for s in sessions: + if not s.started_at: + continue + local_start = utc_to_local(s.started_at) + local_end = utc_to_local(s.stopped_at) if s.stopped_at else now_local + span_start = local_start.date() + span_end = local_end.date() + psh, peh = _period_hours(s) + + cur_d = span_start 
+ while cur_d <= span_end: + if grid_start <= cur_d <= grid_end: + # Device bar bounds (hours 0–24 within this day) + dev_sh = (local_start.hour + local_start.minute / 60.0) if cur_d == span_start else 0.0 + dev_eh = (local_end.hour + local_end.minute / 60.0) if cur_d == span_end else 24.0 + + # Effective window within this day + eff_sh = eff_eh = None + if psh is not None and peh is not None: + if psh < peh: + # Day window e.g. 7→19 + eff_sh, eff_eh = float(psh), float(peh) + else: + # Night window crossing midnight e.g. 19→7 + if cur_d == span_start: + eff_sh, eff_eh = float(psh), 24.0 + else: + eff_sh, eff_eh = 0.0, float(peh) + + # Format tooltip labels + def _fmt_h(h): + hh = int(h) % 24 + mm = int((h % 1) * 60) + suffix = "AM" if hh < 12 else "PM" + return f"{hh % 12 or 12}:{mm:02d} {suffix}" + + if cur_d not in day_sessions: + day_sessions[cur_d] = [] + day_sessions[cur_d].append({ + "session_id": s.id, + "label": s.session_label or f"Session {s.id[:8]}", + "location_id": s.location_id, + "location_name": loc_names.get(s.location_id, "Unknown"), + "color": loc_color.get(s.location_id, "#9ca3af"), + "status": s.status, + "period_type": s.period_type, + # Gantt bar percentages (0–100 scale across 24 hours) + "dev_start_pct": round(dev_sh / 24 * 100, 1), + "dev_width_pct": max(1.5, round((dev_eh - dev_sh) / 24 * 100, 1)), + "eff_start_pct": round(eff_sh / 24 * 100, 1) if eff_sh is not None else None, + "eff_width_pct": max(1.0, round((eff_eh - eff_sh) / 24 * 100, 1)) if eff_sh is not None else None, + "dev_start_label": _fmt_h(dev_sh), + "dev_end_label": _fmt_h(dev_eh), + "eff_start_label": f"{int(psh):02d}:00" if eff_sh is not None else None, + "eff_end_label": f"{int(peh):02d}:00" if eff_sh is not None else None, + }) + cur_d += _td(days=1) + + weeks = [] + cur = grid_start + while cur <= grid_end: + week = [] + for _ in range(7): + week.append({ + "date": cur, + "in_month": cur.month == month, + "is_today": cur == now_local.date(), + "sessions": 
day_sessions.get(cur, []), + }) + cur += _td(days=1) + weeks.append(week) + + # Prev/next month navigation + prev_month = month - 1 if month > 1 else 12 + prev_year = year if month > 1 else year - 1 + next_month = month + 1 if month < 12 else 1 + next_year = year if month < 12 else year + 1 + + import calendar as _cal + month_name = _cal.month_name[month] + + # Legend: only locations that have sessions this month + used_lids = {s["location_id"] for day in day_sessions.values() for s in day} + legend = [ + {"location_id": lid, "name": loc_names.get(lid, lid[:8]), "color": loc_color[lid]} + for lid in loc_ids if lid in used_lids + ] + + return templates.TemplateResponse("partials/projects/sessions_calendar.html", { + "request": request, + "project_id": project_id, + "weeks": weeks, + "month": month, + "year": year, + "month_name": month_name, + "prev_month": prev_month, + "prev_year": prev_year, + "next_month": next_month, + "next_year": next_year, + "legend": legend, + }) + + @router.get("/{project_id}/ftp-browser", response_class=HTMLResponse) async def get_ftp_browser( project_id: str, @@ -1160,15 +1366,18 @@ async def get_ftp_browser( ): """ Get FTP browser interface for downloading files from assigned SLMs. - Returns HTML partial with FTP browser. + Returns HTML partial with FTP browser. Sound Monitoring projects only. """ from backend.models import DataFile - # Get all assignments for this project + project = db.query(Project).filter_by(id=project_id).first() + _require_sound_project(project) + + # Get all assignments for this project (active = assigned_until IS NULL) assignments = db.query(UnitAssignment).filter( and_( UnitAssignment.project_id == project_id, - UnitAssignment.status == "active", + UnitAssignment.assigned_until == None, ) ).all() @@ -1202,6 +1411,7 @@ async def ftp_download_to_server( """ Download a file from an SLM to the server via FTP. 
Creates a DataFile record and stores the file in data/Projects/{project_id}/ + Sound Monitoring projects only. """ import httpx import os @@ -1209,6 +1419,8 @@ async def ftp_download_to_server( from pathlib import Path from backend.models import DataFile + _require_sound_project(db.query(Project).filter_by(id=project_id).first()) + data = await request.json() unit_id = data.get("unit_id") remote_path = data.get("remote_path") @@ -1367,12 +1579,15 @@ async def ftp_download_folder_to_server( Download an entire folder from an SLM to the server via FTP. Extracts all files from the ZIP and preserves folder structure. Creates individual DataFile records for each file. + Sound Monitoring projects only. """ import httpx import os import hashlib import zipfile import io + + _require_sound_project(db.query(Project).filter_by(id=project_id).first()) from pathlib import Path from backend.models import DataFile @@ -1800,6 +2015,23 @@ async def delete_session( VALID_PERIOD_TYPES = {"weekday_day", "weekday_night", "weekend_day", "weekend_night"} + +def _derive_period_type(dt: datetime) -> str: + is_weekend = dt.weekday() >= 5 + is_night = dt.hour >= 22 or dt.hour < 7 + if is_weekend: + return "weekend_night" if is_night else "weekend_day" + return "weekday_night" if is_night else "weekday_day" + + +def _build_session_label(dt: datetime, location_name: str, period_type: str) -> str: + day_abbr = dt.strftime("%a") + date_str = f"{dt.month}/{dt.day}" + period_str = {"weekday_day": "Day", "weekday_night": "Night", "weekend_day": "Day", "weekend_night": "Night"}.get(period_type, "") + parts = [p for p in [location_name, f"{day_abbr} {date_str}", period_str] if p] + return " — ".join(parts) + + @router.patch("/{project_id}/sessions/{session_id}") async def patch_session( project_id: str, @@ -1807,13 +2039,53 @@ async def patch_session( data: dict, db: Session = Depends(get_db), ): - """Update session_label and/or period_type on a monitoring session.""" + """Update session fields: 
started_at, stopped_at, session_label, period_type.""" session = db.query(MonitoringSession).filter_by(id=session_id).first() if not session: raise HTTPException(status_code=404, detail="Session not found") if session.project_id != project_id: raise HTTPException(status_code=403, detail="Session does not belong to this project") + times_changed = False + + if "started_at" in data and data["started_at"]: + try: + local_dt = datetime.fromisoformat(data["started_at"]) + session.started_at = local_to_utc(local_dt) + times_changed = True + except ValueError: + raise HTTPException(status_code=400, detail="Invalid started_at datetime format") + + if "stopped_at" in data: + if data["stopped_at"]: + try: + local_dt = datetime.fromisoformat(data["stopped_at"]) + session.stopped_at = local_to_utc(local_dt) + times_changed = True + except ValueError: + raise HTTPException(status_code=400, detail="Invalid stopped_at datetime format") + else: + session.stopped_at = None + times_changed = True + + if times_changed and session.started_at and session.stopped_at: + delta = session.stopped_at - session.started_at + session.duration_seconds = max(0, int(delta.total_seconds())) + elif times_changed and not session.stopped_at: + session.duration_seconds = None + + # Re-derive period_type and session_label from new started_at unless explicitly provided + if times_changed and session.started_at and "period_type" not in data: + local_start = utc_to_local(session.started_at) + session.period_type = _derive_period_type(local_start) + + if times_changed and session.started_at and "session_label" not in data: + from backend.models import MonitoringLocation + location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() + location_name = location.name if location else "" + local_start = utc_to_local(session.started_at) + session.session_label = _build_session_label(local_start, location_name, session.period_type or "") + if "session_label" in data: session.session_label = 
str(data["session_label"]).strip() or None if "period_type" in data: @@ -1822,8 +2094,111 @@ async def patch_session( raise HTTPException(status_code=400, detail=f"Invalid period_type. Must be one of: {', '.join(sorted(VALID_PERIOD_TYPES))}") session.period_type = pt or None + # Configurable period window (0–23 integers; null = no filter) + for field in ("period_start_hour", "period_end_hour"): + if field in data: + val = data[field] + if val is None or val == "": + setattr(session, field, None) + else: + try: + h = int(val) + if not (0 <= h <= 23): + raise ValueError + setattr(session, field, h) + except (ValueError, TypeError): + raise HTTPException(status_code=400, detail=f"{field} must be an integer 0–23 or null") + + if "report_date" in data: + val = data["report_date"] + if val is None or val == "": + session.report_date = None + else: + try: + from datetime import date as _date + session.report_date = _date.fromisoformat(str(val)) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid report_date format. Use YYYY-MM-DD.") + db.commit() - return JSONResponse({"status": "success", "session_label": session.session_label, "period_type": session.period_type}) + return JSONResponse({ + "status": "success", + "session_label": session.session_label, + "period_type": session.period_type, + "period_start_hour": session.period_start_hour, + "period_end_hour": session.period_end_hour, + "report_date": session.report_date.isoformat() if session.report_date else None, + }) + + +@router.get("/{project_id}/sessions/{session_id}/detail", response_class=HTMLResponse) +async def view_session_detail( + request: Request, + project_id: str, + session_id: str, + db: Session = Depends(get_db), +): + """ + Session detail page: shows files, editable session info, data preview, and report actions. 
+ """ + from backend.models import DataFile + from pathlib import Path + + project = db.query(Project).filter_by(id=project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + _require_sound_project(project) + + session = db.query(MonitoringSession).filter_by(id=session_id, project_id=project_id).first() + if not session: + raise HTTPException(status_code=404, detail="Session not found") + + location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None + unit = db.query(RosterUnit).filter_by(id=session.unit_id).first() if session.unit_id else None + + # Load all data files for this session + files = db.query(DataFile).filter_by(session_id=session_id).order_by(DataFile.created_at).all() + + # Compute effective time range string for display + effective_range = None + if session.period_start_hour is not None and session.period_end_hour is not None and session.started_at: + local_start = utc_to_local(session.started_at) + start_day = session.report_date if session.report_date else local_start.date() + sh = session.period_start_hour + eh = session.period_end_hour + def _fmt_h(h): + ampm = "AM" if h < 12 else "PM" + h12 = h % 12 or 12 + return f"{h12}:00 {ampm}" + start_str = f"{start_day.month}/{start_day.day} {_fmt_h(sh)}" + if eh > sh: + end_day = start_day + else: + from datetime import timedelta as _td + end_day = start_day + _td(days=1) + end_str = f"{end_day.month}/{end_day.day} {_fmt_h(eh)}" + effective_range = f"{start_str} → {end_str}" + + # Parse session_metadata if present + session_meta = {} + if session.session_metadata: + try: + session_meta = json.loads(session.session_metadata) + except Exception: + pass + + return templates.TemplateResponse("session_detail.html", { + "request": request, + "project": project, + "project_id": project_id, + "session": session, + "location": location, + "unit": unit, + "files": files, + "effective_range": effective_range, + 
"session_meta": session_meta, + "report_date": session.report_date.isoformat() if session.report_date else "", + }) @router.get("/{project_id}/files/{file_id}/view-rnd", response_class=HTMLResponse) @@ -1858,6 +2233,7 @@ async def view_rnd_file( # Get project info project = db.query(Project).filter_by(id=project_id).first() + _require_sound_project(project) # Get location info if available location = None @@ -1889,6 +2265,8 @@ async def view_rnd_file( "metadata": metadata, "filename": file_path.name, "is_leq": _is_leq_file(str(file_record.file_path), _peek_rnd_headers(file_path)), + "period_start_hour": session.period_start_hour, + "period_end_hour": session.period_end_hour, }) @@ -1901,12 +2279,15 @@ async def get_rnd_data( """ Get parsed RND file data as JSON. Returns the measurement data for charts and tables. + Sound Monitoring projects only. """ from backend.models import DataFile from pathlib import Path import csv import io + _require_sound_project(db.query(Project).filter_by(id=project_id).first()) + # Get the file record file_record = db.query(DataFile).filter_by(id=file_id).first() if not file_record: @@ -1995,6 +2376,8 @@ async def get_rnd_data( "summary": summary, "headers": summary["headers"], "data": rows, + "period_start_hour": session.period_start_hour, + "period_end_hour": session.period_end_hour, } except Exception as e: @@ -2063,6 +2446,7 @@ async def generate_excel_report( # Get related data for report context project = db.query(Project).filter_by(id=project_id).first() + _require_sound_project(project) location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None # Build full file path @@ -2349,7 +2733,7 @@ async def generate_excel_report( _plot_border.ln.solidFill = "000000" _plot_border.ln.w = 12700 chart.plot_area.spPr = _plot_border - ws.add_chart(chart, "H4") + ws.add_chart(chart, "I4") # --- Stats table: note at I28-I29, headers at I31, data rows 32-34 --- note1 = ws.cell(row=28, 
column=9, value="Note: Averages are calculated by determining the arithmetic average ") @@ -2493,6 +2877,7 @@ async def preview_report_data( # Get related data for report context project = db.query(Project).filter_by(id=project_id).first() + _require_sound_project(project) location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None # Build full file path @@ -2704,6 +3089,7 @@ async def generate_report_from_preview( raise HTTPException(status_code=403, detail="File does not belong to this project") project = db.query(Project).filter_by(id=project_id).first() + _require_sound_project(project) location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None # Extract data from request @@ -2835,7 +3221,7 @@ async def generate_report_from_preview( _plot_border.ln.solidFill = "000000" _plot_border.ln.w = 12700 chart.plot_area.spPr = _plot_border - ws.add_chart(chart, "H4") + ws.add_chart(chart, "I4") # --- Stats block starting at I28 --- # Stats table: note at I28-I29, headers at I31, data rows 32-34, border row 35 @@ -2984,6 +3370,7 @@ async def generate_combined_excel_report( project = db.query(Project).filter_by(id=project_id).first() if not project: raise HTTPException(status_code=404, detail="Project not found") + _require_sound_project(project) # Get all sessions with measurement files sessions = db.query(MonitoringSession).filter_by(project_id=project_id).all() @@ -3182,7 +3569,7 @@ async def generate_combined_excel_report( _plot_border.ln.solidFill = "000000" _plot_border.ln.w = 12700 chart.plot_area.spPr = _plot_border - ws.add_chart(chart, "H4") + ws.add_chart(chart, "I4") # Stats table: note at I28-I29, headers at I31, data rows 32-34, border row 35 note1 = ws.cell(row=28, column=9, value="Note: Averages are calculated by determining the arithmetic average ") @@ -3329,6 +3716,7 @@ async def combined_report_wizard( project = 
db.query(Project).filter_by(id=project_id).first() if not project: raise HTTPException(status_code=404, detail="Project not found") + _require_sound_project(project) sessions = db.query(MonitoringSession).filter_by(project_id=project_id).order_by(MonitoringSession.started_at).all() @@ -3421,7 +3809,10 @@ def _build_location_data_from_sessions(project_id: str, db, selected_session_ids "loc_name": loc_name, "session_label": session.session_label or "", "period_type": session.period_type or "", + "period_start_hour": session.period_start_hour, + "period_end_hour": session.period_end_hour, "started_at": session.started_at, + "report_date": session.report_date, "rows": [], } @@ -3462,25 +3853,39 @@ def _build_location_data_from_sessions(project_id: str, db, selected_session_ids pass parsed.append((dt, row)) - # Determine which rows to keep based on period_type - is_day_session = period_type in ('weekday_day', 'weekend_day') + # Determine effective hour window. + # Prefer per-session period_start/end_hour; fall back to hardcoded defaults. + sh = entry.get("period_start_hour") # e.g. 7 for Day, 19 for Night + eh = entry.get("period_end_hour") # e.g. 
19 for Day, 7 for Night + if sh is None or eh is None: + # Legacy defaults based on period_type + is_day_session = period_type in ('weekday_day', 'weekend_day') + sh = 7 if is_day_session else 19 + eh = 19 if is_day_session else 7 + else: + is_day_session = eh > sh # crosses midnight when end < start + target_date = None if is_day_session: - # Day: 07:00–18:59 only, restricted to the LAST calendar date that has daytime rows - daytime_dates = sorted({ - dt.date() for dt, row in parsed - if dt and 7 <= dt.hour < 19 - }) - target_date = daytime_dates[-1] if daytime_dates else None + # Day-style: start_h <= hour < end_h, restricted to the LAST calendar date + in_window = lambda h: sh <= h < eh + if entry.get("report_date"): + target_date = entry["report_date"] + else: + daytime_dates = sorted({ + dt.date() for dt, row in parsed if dt and in_window(dt.hour) + }) + target_date = daytime_dates[-1] if daytime_dates else None filtered = [ (dt, row) for dt, row in parsed - if dt and dt.date() == target_date and 7 <= dt.hour < 19 + if dt and dt.date() == target_date and in_window(dt.hour) ] else: - # Night: 19:00–06:59, spanning both calendar days — no date restriction + # Night-style: hour >= start_h OR hour < end_h (crosses midnight) + in_window = lambda h: h >= sh or h < eh filtered = [ (dt, row) for dt, row in parsed - if dt and (dt.hour >= 19 or dt.hour < 7) + if dt and in_window(dt.hour) ] # Fall back to all rows if filtering removed everything @@ -3598,6 +4003,7 @@ async def generate_combined_from_preview( project = db.query(Project).filter_by(id=project_id).first() if not project: raise HTTPException(status_code=404, detail="Project not found") + _require_sound_project(project) report_title = data.get("report_title", "Background Noise Study") project_name = data.get("project_name", project.name) @@ -3760,7 +4166,7 @@ async def generate_combined_from_preview( _plot_border.ln.solidFill = "000000" _plot_border.ln.w = 12700 chart.plot_area.spPr = _plot_border - 
ws.add_chart(chart, "H4") + ws.add_chart(chart, "I4") hdr_fill_tbl = PatternFill(start_color="F2F2F2", end_color="F2F2F2", fill_type="solid") @@ -4073,6 +4479,7 @@ async def upload_all_project_data( project = db.query(Project).filter_by(id=project_id).first() if not project: raise HTTPException(status_code=404, detail="Project not found") + _require_sound_project(project) # Load all sound monitoring locations for this project locations = db.query(MonitoringLocation).filter_by( diff --git a/backend/templates_config.py b/backend/templates_config.py index d1c7360..96a871f 100644 --- a/backend/templates_config.py +++ b/backend/templates_config.py @@ -73,10 +73,16 @@ def jinja_log_tail_display(s): return str(s) + +def jinja_local_datetime_input(dt): + """Jinja filter: format UTC datetime as local YYYY-MM-DDTHH:MM for datetime-local inputs.""" + return format_local_datetime(dt, "%Y-%m-%dT%H:%M") + + # Register Jinja filters and globals templates.env.filters["local_datetime"] = jinja_local_datetime templates.env.filters["local_time"] = jinja_local_time templates.env.filters["local_date"] = jinja_local_date +templates.env.filters["local_datetime_input"] = jinja_local_datetime_input templates.env.filters["fromjson"] = jinja_fromjson templates.env.globals["timezone_abbr"] = jinja_timezone_abbr templates.env.globals["get_user_timezone"] = get_user_timezone diff --git a/templates/partials/projects/session_list.html b/templates/partials/projects/session_list.html index 6b8b617..2886855 100644 --- a/templates/partials/projects/session_list.html +++ b/templates/partials/projects/session_list.html @@ -5,6 +5,7 @@ {% set s = item.session %} {% set loc = item.location %} {% set unit = item.unit %} + {% set effective_range = item.effective_range %} {# Period display maps #} {% set period_labels = { @@ -49,25 +50,74 @@ Failed {% endif %} - +
{{ s.notes }}
+ + {% if effective_range %} +Times are in your local timezone. The session label and period type will be updated automatically.
+{{ location.name }}{% if unit %} · {{ unit.id }}{% endif %}
+ {% endif %} +No files found for this session.
+
+ Use the Combined Report Wizard to generate an Excel report for this session, or click View on a Leq file above to access per-file reporting.
+ {% if session.period_start_hour is not none %}
+
Period window {{ session.period_start_hour }}:00–{{ session.period_end_hour }}:00 will be applied.
+ {% endif %}
+
No .rnd files found — upload data to generate a report.
+ {% endif %} + {% else %} +Reports are available after the session is completed.
+ {% endif %} +- Monitoring Location • {{ project.name }} + Monitoring Location • {{ project.name }}
No unit currently assigned
- @@ -214,47 +234,55 @@Attach a seismograph to this location
+Select a seismograph and optionally a modem for this location