From 80861997afd2390f79cdd7094a5fac8b9420f4ae Mon Sep 17 00:00:00 2001 From: serversdwn Date: Wed, 18 Feb 2026 06:44:04 +0000 Subject: [PATCH 01/31] Fix: removed duplicate download following scheduled stop. --- .../services/recurring_schedule_service.py | 25 +++---------------- backend/services/slmm_client.py | 2 +- 2 files changed, 4 insertions(+), 23 deletions(-) diff --git a/backend/services/recurring_schedule_service.py b/backend/services/recurring_schedule_service.py index d19ce41..bce6f7b 100644 --- a/backend/services/recurring_schedule_service.py +++ b/backend/services/recurring_schedule_service.py @@ -332,10 +332,12 @@ class RecurringScheduleService: ) actions.append(start_action) - # Create STOP action + # Create STOP action (stop_cycle handles download when include_download is True) stop_notes = json.dumps({ "schedule_name": schedule.name, "schedule_id": schedule.id, + "schedule_type": "weekly_calendar", + "include_download": schedule.include_download, }) stop_action = ScheduledAction( id=str(uuid.uuid4()), @@ -350,27 +352,6 @@ class RecurringScheduleService: ) actions.append(stop_action) - # Create DOWNLOAD action if enabled (1 minute after stop) - if schedule.include_download: - download_time = end_utc + timedelta(minutes=1) - download_notes = json.dumps({ - "schedule_name": schedule.name, - "schedule_id": schedule.id, - "schedule_type": "weekly_calendar", - }) - download_action = ScheduledAction( - id=str(uuid.uuid4()), - project_id=schedule.project_id, - location_id=schedule.location_id, - unit_id=unit_id, - action_type="download", - device_type=schedule.device_type, - scheduled_time=download_time, - execution_status="pending", - notes=download_notes, - ) - actions.append(download_action) - return actions def _generate_interval_actions( diff --git a/backend/services/slmm_client.py b/backend/services/slmm_client.py index b6b683e..b1dcae1 100644 --- a/backend/services/slmm_client.py +++ b/backend/services/slmm_client.py @@ -659,7 +659,7 @@ class 
SLMMClient: # Format as Auto_XXXX folder name folder_name = f"Auto_{index_number:04d}" - remote_path = f"/NL43_DATA/{folder_name}" + remote_path = f"/NL-43/{folder_name}" # Download the folder result = await self.download_folder(unit_id, remote_path) -- 2.49.1 From 28942600abe623da33d3b62075e50f2fe62f2a3f Mon Sep 17 00:00:00 2001 From: serversdwn Date: Wed, 18 Feb 2026 19:51:44 +0000 Subject: [PATCH 02/31] fix: Auto-downloaded files now show up in project_files data. --- backend/services/scheduler.py | 88 +++++++++++++++++++++++++++++++++-- 1 file changed, 85 insertions(+), 3 deletions(-) diff --git a/backend/services/scheduler.py b/backend/services/scheduler.py index a056cb4..7b9da92 100644 --- a/backend/services/scheduler.py +++ b/backend/services/scheduler.py @@ -295,9 +295,20 @@ class SchedulerService: stop_cycle handles: 1. Stop measurement 2. Enable FTP - 3. Download measurement folder - 4. Verify download + 3. Download measurement folder to SLMM local storage + + After stop_cycle, if download succeeded, this method fetches the ZIP + from SLMM and extracts it into Terra-View's project directory, creating + DataFile records for each file. 
""" + import hashlib + import io + import os + import zipfile + import httpx + from pathlib import Path + from backend.models import DataFile + # Parse notes for download preference include_download = True try: @@ -308,7 +319,7 @@ class SchedulerService: pass # Notes is plain text, not JSON # Execute the full stop cycle via device controller - # SLMM handles stop, FTP enable, and download + # SLMM handles stop, FTP enable, and download to SLMM-local storage cycle_response = await self.device_controller.stop_cycle( unit_id, action.device_type, @@ -340,10 +351,81 @@ class SchedulerService: except json.JSONDecodeError: pass + db.commit() + + # If SLMM downloaded the folder successfully, fetch the ZIP from SLMM + # and extract it into Terra-View's project directory, creating DataFile records + files_created = 0 + if include_download and cycle_response.get("download_success") and active_session: + folder_name = cycle_response.get("downloaded_folder") # e.g. "Auto_0058" + remote_path = f"/NL-43/{folder_name}" + + try: + SLMM_BASE_URL = os.getenv("SLMM_BASE_URL", "http://localhost:8100") + async with httpx.AsyncClient(timeout=600.0) as client: + zip_response = await client.post( + f"{SLMM_BASE_URL}/api/nl43/{unit_id}/ftp/download-folder", + json={"remote_path": remote_path} + ) + + if zip_response.is_success and len(zip_response.content) > 22: + base_dir = Path(f"data/Projects/{action.project_id}/{active_session.id}/{folder_name}") + base_dir.mkdir(parents=True, exist_ok=True) + + file_type_map = { + '.wav': 'audio', '.mp3': 'audio', + '.csv': 'data', '.txt': 'data', '.json': 'data', '.dat': 'data', + '.rnd': 'data', '.rnh': 'data', + '.log': 'log', + '.zip': 'archive', + '.jpg': 'image', '.jpeg': 'image', '.png': 'image', + '.pdf': 'document', + } + + with zipfile.ZipFile(io.BytesIO(zip_response.content)) as zf: + for zip_info in zf.filelist: + if zip_info.is_dir(): + continue + file_data = zf.read(zip_info.filename) + file_path = base_dir / zip_info.filename + 
file_path.parent.mkdir(parents=True, exist_ok=True) + with open(file_path, 'wb') as f: + f.write(file_data) + checksum = hashlib.sha256(file_data).hexdigest() + ext = os.path.splitext(zip_info.filename)[1].lower() + data_file = DataFile( + id=str(uuid.uuid4()), + session_id=active_session.id, + file_path=str(file_path.relative_to("data")), + file_type=file_type_map.get(ext, 'data'), + file_size_bytes=len(file_data), + downloaded_at=datetime.utcnow(), + checksum=checksum, + file_metadata=json.dumps({ + "source": "stop_cycle", + "remote_path": remote_path, + "unit_id": unit_id, + "folder_name": folder_name, + "relative_path": zip_info.filename, + }), + ) + db.add(data_file) + files_created += 1 + + db.commit() + logger.info(f"Created {files_created} DataFile records for session {active_session.id} from {folder_name}") + else: + logger.warning(f"ZIP from SLMM for {folder_name} was empty or failed, skipping DataFile creation") + + except Exception as e: + logger.error(f"Failed to extract ZIP and create DataFile records for {folder_name}: {e}") + # Don't fail the stop action — the device was stopped successfully + return { "status": "stopped", "session_id": active_session.id if active_session else None, "cycle_response": cycle_response, + "files_created": files_created, } async def _execute_download( -- 2.49.1 From dc77a362ce73ea4942e42a03c2f906a8f7af4550 Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 19 Feb 2026 01:25:07 +0000 Subject: [PATCH 03/31] fix: add TCP_IDLE_TTL and TCP_MAX_AGE environment variables for SLMM service --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index e97357e..3baee5a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -61,6 +61,8 @@ services: - PYTHONUNBUFFERED=1 - PORT=8100 - CORS_ORIGINS=* + - TCP_IDLE_TTL=-1 + - TCP_MAX_AGE=-1 restart: unless-stopped healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8100/health"] -- 2.49.1 From 
65362bab214be7361453f7abf9dba86333d0f76a Mon Sep 17 00:00:00 2001 From: serversdwn Date: Thu, 19 Feb 2026 15:23:02 +0000 Subject: [PATCH 04/31] feat: implement project status management with 'on_hold' state and associated UI updates -feat: ability to hard delete projects, plus a soft delete with auto pruning. --- backend/migrate_add_project_deleted_at.py | 56 +++++++ backend/models.py | 3 +- backend/routers/projects.py | 82 ++++++++-- .../partials/projects/project_dashboard.html | 2 + templates/partials/projects/project_list.html | 4 + .../projects/project_list_compact.html | 2 + .../partials/projects/project_stats.html | 14 ++ templates/projects/detail.html | 149 +++++++++++++++++- templates/projects/overview.html | 8 +- 9 files changed, 300 insertions(+), 20 deletions(-) create mode 100644 backend/migrate_add_project_deleted_at.py diff --git a/backend/migrate_add_project_deleted_at.py b/backend/migrate_add_project_deleted_at.py new file mode 100644 index 0000000..d15ed34 --- /dev/null +++ b/backend/migrate_add_project_deleted_at.py @@ -0,0 +1,56 @@ +""" +Migration: Add deleted_at column to projects table + +Adds columns: +- projects.deleted_at: Timestamp set when status='deleted'; data hard-deleted after 60 days +""" + +import sqlite3 +import sys +from pathlib import Path + + +def migrate(db_path: str): + """Run the migration.""" + print(f"Migrating database: {db_path}") + + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + + try: + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='projects'") + if not cursor.fetchone(): + print("projects table does not exist. Skipping migration.") + return + + cursor.execute("PRAGMA table_info(projects)") + existing_cols = {row[1] for row in cursor.fetchall()} + + if 'deleted_at' not in existing_cols: + print("Adding deleted_at column to projects...") + cursor.execute("ALTER TABLE projects ADD COLUMN deleted_at DATETIME") + else: + print("deleted_at column already exists. 
Skipping.") + + conn.commit() + print("Migration completed successfully!") + + except Exception as e: + print(f"Migration failed: {e}") + conn.rollback() + raise + finally: + conn.close() + + +if __name__ == "__main__": + db_path = "./data/terra-view.db" + + if len(sys.argv) > 1: + db_path = sys.argv[1] + + if not Path(db_path).exists(): + print(f"Database not found: {db_path}") + sys.exit(1) + + migrate(db_path) diff --git a/backend/models.py b/backend/models.py index 49ec9af..5f8eb99 100644 --- a/backend/models.py +++ b/backend/models.py @@ -155,7 +155,7 @@ class Project(Base): name = Column(String, nullable=False, unique=True) # Project/site name (e.g., "RKM Hall") description = Column(Text, nullable=True) project_type_id = Column(String, nullable=False) # FK to ProjectType.id - status = Column(String, default="active") # active, completed, archived + status = Column(String, default="active") # active, on_hold, completed, archived, deleted # Project metadata client_name = Column(String, nullable=True, index=True) # Client name (e.g., "PJ Dick") @@ -166,6 +166,7 @@ class Project(Base): created_at = Column(DateTime, default=datetime.utcnow) updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + deleted_at = Column(DateTime, nullable=True) # Set when status='deleted'; hard delete scheduled after 60 days class MonitoringLocation(Base): diff --git a/backend/routers/projects.py b/backend/routers/projects.py index 2fcf0f0..4beec68 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -57,9 +57,11 @@ async def get_projects_list( """ query = db.query(Project) - # Filter by status if provided + # Filter by status if provided; otherwise exclude soft-deleted projects if status: query = query.filter(Project.status == status) + else: + query = query.filter(Project.status != "deleted") # Filter by project type if provided if project_type_id: @@ -118,9 +120,10 @@ async def get_projects_stats(request: Request, db: Session = 
Depends(get_db)): Get summary statistics for projects overview. Returns HTML partial with stat cards. """ - # Count projects by status - total_projects = db.query(func.count(Project.id)).scalar() + # Count projects by status (exclude deleted) + total_projects = db.query(func.count(Project.id)).filter(Project.status != "deleted").scalar() active_projects = db.query(func.count(Project.id)).filter_by(status="active").scalar() + on_hold_projects = db.query(func.count(Project.id)).filter_by(status="on_hold").scalar() completed_projects = db.query(func.count(Project.id)).filter_by(status="completed").scalar() # Count total locations across all projects @@ -140,6 +143,7 @@ async def get_projects_stats(request: Request, db: Session = Depends(get_db)): "request": request, "total_projects": total_projects, "active_projects": active_projects, + "on_hold_projects": on_hold_projects, "completed_projects": completed_projects, "total_locations": total_locations, "assigned_units": assigned_units, @@ -178,13 +182,13 @@ async def search_projects( if not q.strip(): # Return recent active projects when no search term projects = db.query(Project).filter( - Project.status != "archived" + Project.status.notin_(["archived", "deleted"]) ).order_by(Project.updated_at.desc()).limit(limit).all() else: search_term = f"%{q}%" projects = db.query(Project).filter( and_( - Project.status != "archived", + Project.status.notin_(["archived", "deleted"]), or_( Project.project_number.ilike(search_term), Project.client_name.ilike(search_term), @@ -223,13 +227,13 @@ async def search_projects_json( """ if not q.strip(): projects = db.query(Project).filter( - Project.status != "archived" + Project.status.notin_(["archived", "deleted"]) ).order_by(Project.updated_at.desc()).limit(limit).all() else: search_term = f"%{q}%" projects = db.query(Project).filter( and_( - Project.status != "archived", + Project.status.notin_(["archived", "deleted"]), or_( Project.project_number.ilike(search_term), 
Project.client_name.ilike(search_term), @@ -359,18 +363,76 @@ async def update_project( @router.delete("/{project_id}") async def delete_project(project_id: str, db: Session = Depends(get_db)): """ - Delete a project (soft delete by archiving). + Soft-delete a project. Sets status='deleted' and records deleted_at timestamp. + Data will be permanently removed after 60 days (or via /permanent endpoint). """ project = db.query(Project).filter_by(id=project_id).first() if not project: raise HTTPException(status_code=404, detail="Project not found") - project.status = "archived" + project.status = "deleted" + project.deleted_at = datetime.utcnow() project.updated_at = datetime.utcnow() db.commit() - return {"success": True, "message": "Project archived successfully"} + return {"success": True, "message": "Project deleted. Data will be permanently removed after 60 days."} + + +@router.delete("/{project_id}/permanent") +async def permanently_delete_project(project_id: str, db: Session = Depends(get_db)): + """ + Hard-delete a project and all related data. Only allowed when status='deleted'. + Removes: locations, assignments, sessions, scheduled actions, recurring schedules. 
+ """ + project = db.query(Project).filter_by(id=project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + if project.status != "deleted": + raise HTTPException(status_code=400, detail="Project must be soft-deleted before permanent deletion.") + + # Delete related data + db.query(RecurringSchedule).filter_by(project_id=project_id).delete() + db.query(ScheduledAction).filter_by(project_id=project_id).delete() + db.query(RecordingSession).filter_by(project_id=project_id).delete() + db.query(UnitAssignment).filter_by(project_id=project_id).delete() + db.query(MonitoringLocation).filter_by(project_id=project_id).delete() + db.delete(project) + db.commit() + + return {"success": True, "message": "Project permanently deleted."} + + +@router.post("/{project_id}/hold") +async def hold_project(project_id: str, db: Session = Depends(get_db)): + """ + Put a project on hold. Pauses without archiving; assignments and schedules remain. + """ + project = db.query(Project).filter_by(id=project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + + project.status = "on_hold" + project.updated_at = datetime.utcnow() + db.commit() + + return {"success": True, "message": "Project put on hold."} + + +@router.post("/{project_id}/unhold") +async def unhold_project(project_id: str, db: Session = Depends(get_db)): + """ + Resume a project that was on hold. 
+ """ + project = db.query(Project).filter_by(id=project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + + project.status = "active" + project.updated_at = datetime.utcnow() + db.commit() + + return {"success": True, "message": "Project resumed."} # ============================================================================ diff --git a/templates/partials/projects/project_dashboard.html b/templates/partials/projects/project_dashboard.html index 94e95b9..1cf04f6 100644 --- a/templates/partials/projects/project_dashboard.html +++ b/templates/partials/projects/project_dashboard.html @@ -13,6 +13,8 @@ {% if project.status == 'active' %} Active + {% elif project.status == 'on_hold' %} + On Hold {% elif project.status == 'completed' %} Completed {% elif project.status == 'archived' %} diff --git a/templates/partials/projects/project_list.html b/templates/partials/projects/project_list.html index 60d1d3e..3005438 100644 --- a/templates/partials/projects/project_list.html +++ b/templates/partials/projects/project_list.html @@ -34,6 +34,10 @@ Active + {% elif item.project.status == 'on_hold' %} + + On Hold + {% elif item.project.status == 'completed' %} Completed diff --git a/templates/partials/projects/project_list_compact.html b/templates/partials/projects/project_list_compact.html index d5f78e4..a2acf79 100644 --- a/templates/partials/projects/project_list_compact.html +++ b/templates/partials/projects/project_list_compact.html @@ -16,6 +16,8 @@ {% if item.project.status == 'active' %} Active + {% elif item.project.status == 'on_hold' %} + On Hold {% elif item.project.status == 'completed' %} Completed {% elif item.project.status == 'archived' %} diff --git a/templates/partials/projects/project_stats.html b/templates/partials/projects/project_stats.html index 30b5ac7..1cf8926 100644 --- a/templates/partials/projects/project_stats.html +++ b/templates/partials/projects/project_stats.html @@ -27,6 +27,20 @@ +
+
+
+

On Hold

+

{{ on_hold_projects }}

+
+
+ + + +
+
+
+
diff --git a/templates/projects/detail.html b/templates/projects/detail.html index fa6c79d..ef0b903 100644 --- a/templates/projects/detail.html +++ b/templates/projects/detail.html @@ -279,6 +279,7 @@ @@ -329,14 +330,39 @@

Danger Zone

-
-

- Archive this project to remove it from active listings. All data will be preserved. -

- +
+ +
+
+

Put Project On Hold

+

Pause this project without archiving. Assignments and schedules remain in place.

+
+
+ +
+
+ +
+
+

Archive Project

+

Remove from active listings. All data is preserved and can be restored.

+
+ +
+ +
+
+

Delete Project

+

Permanently removes all project data after a 60-day grace period. This action is difficult to undo.

+
+ +
@@ -596,6 +622,40 @@
+ + + {% endblock %} diff --git a/templates/partials/projects/file_list.html b/templates/partials/projects/file_list.html index 979a8ed..3b5b382 100644 --- a/templates/partials/projects/file_list.html +++ b/templates/partials/projects/file_list.html @@ -151,9 +151,9 @@ -

No files downloaded yet

+

No data files yet

- Files will appear here once they are downloaded from the sound level meter + Files appear here after an FTP download from a connected meter, or after uploading SD card data manually.

{% endif %} diff --git a/templates/partials/projects/schedule_oneoff.html b/templates/partials/projects/schedule_oneoff.html index c3e4e4d..4b8dba4 100644 --- a/templates/partials/projects/schedule_oneoff.html +++ b/templates/partials/projects/schedule_oneoff.html @@ -5,7 +5,7 @@

One-Off Recording

- Schedule a single recording session with a specific start and end time. + Schedule a single monitoring session with a specific start and end time. Duration can be between 15 minutes and 24 hours.

diff --git a/templates/partials/projects/session_list.html b/templates/partials/projects/session_list.html index 51af907..957f431 100644 --- a/templates/partials/projects/session_list.html +++ b/templates/partials/projects/session_list.html @@ -1,4 +1,4 @@ - + {% if sessions %}
{% for item in sessions %} @@ -87,7 +87,7 @@ -

No recording sessions yet

+

No monitoring sessions yet

Schedule a session to get started

{% endif %} @@ -99,7 +99,7 @@ function viewSession(sessionId) { } function stopRecording(sessionId) { - if (!confirm('Stop this recording session?')) return; + if (!confirm('Stop this monitoring session?')) return; // TODO: Implement stop recording API call alert('Stop recording API coming soon for session: ' + sessionId); diff --git a/templates/projects/detail.html b/templates/projects/detail.html index 8fcb927..dc33a09 100644 --- a/templates/projects/detail.html +++ b/templates/projects/detail.html @@ -53,7 +53,7 @@ + + + + + + + + + + + +{% endblock %} -- 2.49.1 From 291fa8e862cafb5be9d8699e834c92708bafc3d7 Mon Sep 17 00:00:00 2001 From: serversdown Date: Wed, 25 Feb 2026 00:43:47 +0000 Subject: [PATCH 17/31] feat: Manual sound data uploads, standalone SLM type added.(no modem mode), Smart uploading with fuzzy name matching enabled. --- backend/main.py | 10 + backend/routers/project_locations.py | 64 ++++ backend/routers/projects.py | 301 ++++++++++++++++++ templates/nrl_detail.html | 39 ++- .../partials/projects/nrl_live_status.html | 89 ++++++ templates/projects/detail.html | 188 ++++++++++- 6 files changed, 685 insertions(+), 6 deletions(-) create mode 100644 templates/partials/projects/nrl_live_status.html diff --git a/backend/main.py b/backend/main.py index 777ff48..c20333f 100644 --- a/backend/main.py +++ b/backend/main.py @@ -364,6 +364,15 @@ async def nrl_detail_page( ) ).first() + # Parse connection_mode from location_metadata JSON + import json as _json + connection_mode = "connected" + try: + meta = _json.loads(location.location_metadata or "{}") + connection_mode = meta.get("connection_mode", "connected") + except Exception: + pass + template = "vibration_location_detail.html" if location.location_type == "vibration" else "nrl_detail.html" return templates.TemplateResponse(template, { "request": request, @@ -376,6 +385,7 @@ async def nrl_detail_page( "session_count": session_count, "file_count": file_count, "active_session": active_session, + 
"connection_mode": connection_mode, }) diff --git a/backend/routers/project_locations.py b/backend/routers/project_locations.py index 28b4127..8e31614 100644 --- a/backend/routers/project_locations.py +++ b/backend/routers/project_locations.py @@ -736,3 +736,67 @@ async def upload_nrl_data( "started_at": started_at.isoformat() if started_at else None, "stopped_at": stopped_at.isoformat() if stopped_at else None, } + + +# ============================================================================ +# NRL Live Status (connected NRLs only) +# ============================================================================ + +@router.get("/nrl/{location_id}/live-status", response_class=HTMLResponse) +async def get_nrl_live_status( + project_id: str, + location_id: str, + request: Request, + db: Session = Depends(get_db), +): + """ + Fetch cached status from SLMM for the unit assigned to this NRL and + return a compact HTML status card. Used in the NRL overview tab for + connected NRLs. Gracefully shows an offline message if SLMM is unreachable. 
+ """ + import os + import httpx + + # Find the assigned unit + assignment = db.query(UnitAssignment).filter( + and_( + UnitAssignment.location_id == location_id, + UnitAssignment.status == "active", + ) + ).first() + + if not assignment: + return templates.TemplateResponse("partials/projects/nrl_live_status.html", { + "request": request, + "status": None, + "error": "No unit assigned", + }) + + unit = db.query(RosterUnit).filter_by(id=assignment.unit_id).first() + if not unit: + return templates.TemplateResponse("partials/projects/nrl_live_status.html", { + "request": request, + "status": None, + "error": "Assigned unit not found", + }) + + slmm_base = os.getenv("SLMM_BASE_URL", "http://localhost:8100") + status_data = None + error_msg = None + + try: + async with httpx.AsyncClient(timeout=5.0) as client: + resp = await client.get(f"{slmm_base}/api/nl43/{unit.id}/status") + if resp.status_code == 200: + status_data = resp.json() + else: + error_msg = f"SLMM returned {resp.status_code}" + except Exception as e: + error_msg = "SLMM unreachable" + + return templates.TemplateResponse("partials/projects/nrl_live_status.html", { + "request": request, + "unit": unit, + "status": status_data, + "error": error_msg, + }) diff --git a/backend/routers/projects.py b/backend/routers/projects.py index 4d5ea24..576d867 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -23,12 +23,18 @@ import io from backend.utils.timezone import utc_to_local, format_local_datetime from backend.database import get_db +from fastapi import UploadFile, File +import zipfile +import hashlib +import pathlib as _pathlib + from backend.models import ( Project, ProjectType, MonitoringLocation, UnitAssignment, MonitoringSession, + DataFile, ScheduledAction, RecurringSchedule, RosterUnit, @@ -2697,6 +2703,301 @@ async def generate_combined_excel_report( ) +# ============================================================================ +# Project-level bulk upload (entire 
date-folder structure) +# ============================================================================ + +def _bulk_parse_rnh(content: bytes) -> dict: + """Parse a Rion .rnh metadata file for session start/stop times and device info.""" + result = {} + try: + text = content.decode("utf-8", errors="replace") + for line in text.splitlines(): + line = line.strip() + if not line or line.startswith("["): + continue + if "," in line: + key, _, value = line.partition(",") + key = key.strip() + value = value.strip() + mapping = { + "Serial Number": "serial_number", + "Store Name": "store_name", + "Index Number": "index_number", + "Measurement Start Time": "start_time_str", + "Measurement Stop Time": "stop_time_str", + "Total Measurement Time": "total_time_str", + } + if key in mapping: + result[mapping[key]] = value + except Exception: + pass + return result + + +def _bulk_parse_datetime(s: str): + if not s: + return None + try: + return datetime.strptime(s.strip(), "%Y/%m/%d %H:%M:%S") + except Exception: + return None + + +def _bulk_classify_file(filename: str) -> str: + name = filename.lower() + if name.endswith(".rnh"): + return "log" + if name.endswith(".rnd"): + return "measurement" + if name.endswith(".mp3") or name.endswith(".wav") or name.endswith(".m4a"): + return "audio" + if name.endswith(".xlsx") or name.endswith(".xls") or name.endswith(".csv"): + return "data" + return "data" + + +# Files we skip entirely — already-converted outputs that don't need re-importing +_BULK_SKIP_EXTENSIONS = {".xlsx", ".xls"} + + +@router.post("/{project_id}/upload-all") +async def upload_all_project_data( + project_id: str, + request: Request, + db: Session = Depends(get_db), +): + """ + Bulk-import an entire structured data folder selected via webkitdirectory. 
+ + Expected folder structure (flexible depth): + [date_folder]/[NRL_name]/[Auto_####]/ ← files here + -- OR -- + [NRL_name]/[Auto_####]/ ← files here (no date wrapper) + -- OR -- + [date_folder]/[NRL_name]/ ← files directly in NRL folder + + Each leaf folder group of .rnd/.rnh files becomes one MonitoringSession. + NRL folder names are matched case-insensitively to MonitoringLocation.name. + .mp3 files are stored as audio. .xlsx/.xls are skipped (already-converted). + Unmatched folders are reported but don't cause failure. + """ + form = await request.form() + + # Collect (relative_path, filename, bytes) for every uploaded file. + # The JS sends each file as "files" and its webkitRelativePath as "paths". + from collections import defaultdict + + uploaded_files = form.getlist("files") + uploaded_paths = form.getlist("paths") + + if not uploaded_files: + raise HTTPException(status_code=400, detail="No files received.") + + if len(uploaded_paths) != len(uploaded_files): + # Fallback: use bare filename if paths weren't sent + uploaded_paths = [f.filename for f in uploaded_files] + + project = db.query(Project).filter_by(id=project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + + # Load all sound monitoring locations for this project + locations = db.query(MonitoringLocation).filter_by( + project_id=project_id, + location_type="sound", + ).all() + + # Build a case-insensitive name → location map + loc_by_name: dict[str, MonitoringLocation] = { + loc.name.strip().lower(): loc for loc in locations + } + + def _normalize(s: str) -> str: + """Lowercase, strip spaces/hyphens/underscores for fuzzy comparison.""" + return s.lower().replace(" ", "").replace("-", "").replace("_", "") + + # Pre-build normalized keys for fuzzy matching + loc_by_normalized: dict[str, MonitoringLocation] = { + _normalize(loc.name): loc for loc in locations + } + + def _find_location_for_path(path: str): + """ + Walk path components from right and 
return first matching location. + Tries exact match first, then normalized (strips spaces/hyphens/underscores), + then checks if the location name *starts with* the normalized folder name. + e.g. folder "NRL 1" matches location "NRL1 - Test Location" + """ + components = path.replace("\\", "/").split("/") + for comp in reversed(components): + # Exact match + key = comp.strip().lower() + if key in loc_by_name: + return loc_by_name[key] + # Normalized match ("NRL 1" == "NRL1") + norm = _normalize(comp) + if norm in loc_by_normalized: + return loc_by_normalized[norm] + # Prefix match: location name starts with the folder component + # e.g. "NRL1" matches "NRL1 - Test Location" + for loc_norm, loc in loc_by_normalized.items(): + if loc_norm.startswith(norm) or norm.startswith(loc_norm): + return loc + return None + + def _session_group_key(parts: tuple) -> str: + """ + Determine the grouping key for a file path. + Files inside Auto_####/Auto_Leq/ or Auto_####/Auto_Lp_01/ are collapsed + up to their Auto_#### parent so they all land in the same session. 
+ """ + # Find the deepest Auto_#### component (case-insensitive) + auto_idx = None + for i, p in enumerate(parts): + if p.lower().startswith("auto_") and not p.lower().startswith("auto_leq") and not p.lower().startswith("auto_lp"): + auto_idx = i + if auto_idx is not None: + # Group key = everything up to and including Auto_#### + return "/".join(parts[:auto_idx + 1]) + # Fallback: use the immediate parent folder + return "/".join(parts[:-1]) if len(parts) > 1 else "" + + # --- Group files by session key --- + groups: dict[str, list[tuple[str, bytes]]] = defaultdict(list) + + for rel_path, uf in zip(uploaded_paths, uploaded_files): + rel_path = rel_path.replace("\\", "/").strip("/") + parts = _pathlib.PurePosixPath(rel_path).parts + if not parts: + continue + fname = parts[-1] + # Skip already-converted Excel exports + if _pathlib.PurePosixPath(fname).suffix.lower() in _BULK_SKIP_EXTENSIONS: + continue + group_key = _session_group_key(parts) + data = await uf.read() + groups[group_key].append((fname, data)) + + # Aggregate by (location_id, date_label) so each Auto_#### group is one session + # key: (location_id or None, group_path) + session_results = [] + unmatched_paths = [] + total_files = 0 + total_sessions = 0 + + for group_path, file_list in sorted(groups.items()): + matched_loc = _find_location_for_path(group_path) + + if matched_loc is None: + unmatched_paths.append(group_path) + continue + + # Parse .rnh if present in this group + rnh_meta = {} + for fname, fbytes in file_list: + if fname.lower().endswith(".rnh"): + rnh_meta = _bulk_parse_rnh(fbytes) + break + + started_at = _bulk_parse_datetime(rnh_meta.get("start_time_str")) or datetime.utcnow() + stopped_at = _bulk_parse_datetime(rnh_meta.get("stop_time_str")) + duration_seconds = None + if started_at and stopped_at: + duration_seconds = int((stopped_at - started_at).total_seconds()) + + store_name = rnh_meta.get("store_name", "") + serial_number = rnh_meta.get("serial_number", "") + index_number = 
rnh_meta.get("index_number", "") + + session_id = str(uuid.uuid4()) + monitoring_session = MonitoringSession( + id=session_id, + project_id=project_id, + location_id=matched_loc.id, + unit_id=None, + session_type="sound", + started_at=started_at, + stopped_at=stopped_at, + duration_seconds=duration_seconds, + status="completed", + session_metadata=json.dumps({ + "source": "bulk_upload", + "group_path": group_path, + "store_name": store_name, + "serial_number": serial_number, + "index_number": index_number, + }), + ) + db.add(monitoring_session) + db.commit() + db.refresh(monitoring_session) + + # Write files + output_dir = _pathlib.Path("data/Projects") / project_id / session_id + output_dir.mkdir(parents=True, exist_ok=True) + + leq_count = 0 + lp_count = 0 + group_file_count = 0 + + for fname, fbytes in file_list: + file_type = _bulk_classify_file(fname) + fname_lower = fname.lower() + if fname_lower.endswith(".rnd"): + if "_leq_" in fname_lower: + leq_count += 1 + elif "_lp" in fname_lower: + lp_count += 1 + + dest = output_dir / fname + dest.write_bytes(fbytes) + checksum = hashlib.sha256(fbytes).hexdigest() + rel_path = str(dest.relative_to("data")) + + data_file = DataFile( + id=str(uuid.uuid4()), + session_id=session_id, + file_path=rel_path, + file_type=file_type, + file_size_bytes=len(fbytes), + downloaded_at=datetime.utcnow(), + checksum=checksum, + file_metadata=json.dumps({ + "source": "bulk_upload", + "original_filename": fname, + "group_path": group_path, + "store_name": store_name, + }), + ) + db.add(data_file) + group_file_count += 1 + + db.commit() + total_files += group_file_count + total_sessions += 1 + + session_results.append({ + "location_name": matched_loc.name, + "location_id": matched_loc.id, + "session_id": session_id, + "group_path": group_path, + "files": group_file_count, + "leq_files": leq_count, + "lp_files": lp_count, + "store_name": store_name, + "started_at": started_at.isoformat() if started_at else None, + }) + + return { + 
"success": True, + "sessions_created": total_sessions, + "files_imported": total_files, + "unmatched_folders": unmatched_paths, + "sessions": session_results, + } + + @router.get("/types/list", response_class=HTMLResponse) async def get_project_types(request: Request, db: Session = Depends(get_db)): """ diff --git a/templates/nrl_detail.html b/templates/nrl_detail.html index b702944..edebe16 100644 --- a/templates/nrl_detail.html +++ b/templates/nrl_detail.html @@ -70,7 +70,7 @@ class="tab-button px-4 py-3 border-b-2 border-transparent font-medium text-sm text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-white hover:border-gray-300 dark:hover:border-gray-600 transition-colors"> Settings - {% if assigned_unit %} + {% if assigned_unit and connection_mode == 'connected' %} + + + + + + + +
- +
+
Connected
+
Remote unit accessible via modem. Supports live control and FTP download.
+
+ + +
+
+
\u2022 ${sess.location_name} — ${sess.files} files`; + if (sess.leq_files || sess.lp_files) html += ` (${sess.leq_files} Leq, ${sess.lp_files} Lp)`; + if (sess.store_name) html += ` — ${sess.store_name}`; + html += ''; + } + html += ''; + } + if (data.unmatched_folders && data.unmatched_folders.length) { + html += `
\u26a0 Unmatched folders (no NRL location found): ${data.unmatched_folders.join(', ')}
`; + } + if (html) { + resultsEl.innerHTML = html; + resultsEl.classList.remove('hidden'); + } + + // Refresh the unified files view + htmx.trigger(document.getElementById('unified-files'), 'refresh'); + } else { + status.textContent = `Error: ${data.detail || 'Upload failed'}`; + status.className = 'text-sm text-red-600 dark:text-red-400'; + } + } catch (err) { + status.textContent = `Error: ${err.message}`; + status.className = 'text-sm text-red-600 dark:text-red-400'; + } +} + // Load project details on page load and restore active tab from URL hash document.addEventListener('DOMContentLoaded', function() { loadProjectDetails(); -- 2.49.1 From bd3d937a82c55525b5047746162c14dc2c9535a5 Mon Sep 17 00:00:00 2001 From: serversdown Date: Wed, 25 Feb 2026 21:41:51 +0000 Subject: [PATCH 18/31] feat: enhance project data handling with new Jinja filters and update UI labels for clarity --- backend/routers/projects.py | 13 +++++--- backend/templates_config.py | 31 +++++++++++++++++++ .../partials/projects/unified_files.html | 20 ++++++++++-- templates/projects/detail.html | 6 ++-- 4 files changed, 59 insertions(+), 11 deletions(-) diff --git a/backend/routers/projects.py b/backend/routers/projects.py index 576d867..87fecfe 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -2851,17 +2851,20 @@ async def upload_all_project_data( Determine the grouping key for a file path. Files inside Auto_####/Auto_Leq/ or Auto_####/Auto_Lp_01/ are collapsed up to their Auto_#### parent so they all land in the same session. + Only folder components are examined (not the filename, which is parts[-1]). 
""" - # Find the deepest Auto_#### component (case-insensitive) + # Only look at folder components — exclude the filename (last part) + folder_parts = parts[:-1] auto_idx = None - for i, p in enumerate(parts): - if p.lower().startswith("auto_") and not p.lower().startswith("auto_leq") and not p.lower().startswith("auto_lp"): + for i, p in enumerate(folder_parts): + p_lower = p.lower() + if p_lower.startswith("auto_") and not p_lower.startswith("auto_leq") and not p_lower.startswith("auto_lp"): auto_idx = i if auto_idx is not None: # Group key = everything up to and including Auto_#### - return "/".join(parts[:auto_idx + 1]) + return "/".join(folder_parts[:auto_idx + 1]) # Fallback: use the immediate parent folder - return "/".join(parts[:-1]) if len(parts) > 1 else "" + return "/".join(folder_parts) if folder_parts else "" # --- Group files by session key --- groups: dict[str, list[tuple[str, bytes]]] = defaultdict(list) diff --git a/backend/templates_config.py b/backend/templates_config.py index c0e4212..453b284 100644 --- a/backend/templates_config.py +++ b/backend/templates_config.py @@ -5,6 +5,7 @@ All routers should import `templates` from this module to get consistent filter and global function registration. """ +import json as _json from fastapi.templating import Jinja2Templates # Import timezone utilities @@ -32,8 +33,38 @@ def jinja_timezone_abbr(): # Create templates instance templates = Jinja2Templates(directory="templates") +def jinja_local_date(dt, fmt="%m-%d-%y"): + """Jinja filter: format a UTC datetime as a local date string (e.g. 
def jinja_fromjson(s):
    """Jinja filter: parse a JSON object string into a dict.

    Returns {} for empty input, for malformed JSON, and for valid JSON
    that is not an object (e.g. a list or scalar), so templates can
    always chain ``.get(...)`` on the result without crashing.
    """
    if not s:
        return {}
    try:
        parsed = _json.loads(s)
    except (TypeError, ValueError):
        # json.JSONDecodeError subclasses ValueError; TypeError covers
        # non-string input.
        return {}
    # Callers (e.g. unified_files.html) immediately do meta.get(...),
    # so guarantee a dict even when the JSON was a list or scalar.
    return parsed if isinstance(parsed, dict) else {}
+ {% set meta = session.session_metadata|fromjson if session.session_metadata else {} %} + {% set is_manual = meta.get('source') in ('manual_upload', 'bulk_upload') %}
- {{ session.started_at|local_datetime if session.started_at else 'Unknown Date' }} + {% if location %}{{ location.name }}{% else %}Unknown Location{% endif %} + {% if session.started_at %} + — + {% if session.stopped_at and not same_date(session.started_at, session.stopped_at) %} + {{ session.started_at|local_date }} to {{ session.stopped_at|local_date }} + {% else %} + {{ session.started_at|local_date }} + {% endif %} + {% endif %}
- {% if unit %}{{ unit.id }}{% else %}Unknown Unit{% endif %} - {% if location %} @ {{ location.name }}{% endif %} + {% if is_manual %} + {% set store = meta.get('store_name') %} + Manual upload{% if store %} — Store {{ store }}{% endif %} + {% elif unit %} + {{ unit.id }} + {% endif %} {{ files|length }} file{{ 's' if files|length != 1 else '' }}
diff --git a/templates/projects/detail.html b/templates/projects/detail.html index 49fe689..7c579d4 100644 --- a/templates/projects/detail.html +++ b/templates/projects/detail.html @@ -235,7 +235,7 @@ - Upload All + Upload Days
- + + + + +
+ + +
+
+
+ + +
+
+ + +
+
+ + +
+
+
+ + +
+ + +
+
+ {% for loc in location_data %} + + {% endfor %} +
+
+ + +
+
+

+ {{ location_data[0].location_name if location_data else '' }} +

+
+ Right-click for options + | + Double-click to edit +
+
+ + {% for loc in location_data %} +
+
+
+ {% endfor %} +
+
+ + +
+

Editing Tips

+
    +
  • Double-click any cell to edit its value
  • +
  • Use the Comments column to add notes about specific measurements
  • +
  • Right-click a row to insert or delete rows
  • +
  • Press Enter to confirm edits, Escape to cancel
  • +
  • Switch between location tabs to edit each location's data independently
  • +
+
+ +
+ + + + + + + + + +{% endblock %} diff --git a/templates/combined_report_wizard.html b/templates/combined_report_wizard.html new file mode 100644 index 0000000..04551d7 --- /dev/null +++ b/templates/combined_report_wizard.html @@ -0,0 +1,371 @@ +{% extends "base.html" %} + +{% block title %}Combined Report Wizard - {{ project.name }}{% endblock %} + +{% block content %} +
+ +
+
+
+
+

Combined Report Wizard

+

{{ project.name }}

+
+ + ← Back to Project + +
+
+
+ +
+ + +
+

Report Settings

+ + +
+
+ + +
+ +
+ + +
+ + +
+ + +
+
+ + +
+
+ + +
+
+
+ + +
+

Time Filter

+

Applied to all locations. Leave blank to include all data.

+ + +
+ + + + +
+ + +
+
+ + +
+
+ + +
+
+ + +
+ +
+
+ + +
+
+ + +
+
+
+
+ + +
+
+
+

Locations to Include

+

+ {{ locations|length }} of {{ locations|length }} selected +

+
+
+ + +
+
+ + {% if locations %} +
+ {% for loc in locations %} + + {% endfor %} +
+ {% else %} +
+

No Leq measurement files found in this project.

+

Upload RND files with '_Leq_' in the filename to generate reports.

+
+ {% endif %} +
+ + +
+ + Cancel + + +
+ +
+
+ + +{% endblock %} diff --git a/templates/partials/projects/project_header.html b/templates/partials/projects/project_header.html index 14d0e12..8315aac 100644 --- a/templates/partials/projects/project_header.html +++ b/templates/partials/projects/project_header.html @@ -17,7 +17,7 @@
{% if project_type and project_type.id == 'sound_monitoring' %} - -- 2.49.1 From 015ce0a2546f8b7024ff62fa04c5c07ff6ff04ca Mon Sep 17 00:00:00 2001 From: serversdown Date: Thu, 5 Mar 2026 21:50:41 +0000 Subject: [PATCH 22/31] feat: add data collection mode to projects with UI updates and migration script --- ...igrate_add_project_data_collection_mode.py | 53 +++++++++++++++ backend/models.py | 5 ++ backend/routers/projects.py | 3 + templates/partials/project_create_modal.html | 45 +++++++++++++ .../partials/projects/project_header.html | 15 +++++ templates/projects/detail.html | 64 +++++++++++++++++-- 6 files changed, 181 insertions(+), 4 deletions(-) create mode 100644 backend/migrate_add_project_data_collection_mode.py diff --git a/backend/migrate_add_project_data_collection_mode.py b/backend/migrate_add_project_data_collection_mode.py new file mode 100644 index 0000000..dbac4d6 --- /dev/null +++ b/backend/migrate_add_project_data_collection_mode.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 +""" +Migration: Add data_collection_mode column to projects table. + +Values: + "remote" — units have modems; data pulled via FTP/scheduler automatically + "manual" — no modem; SD cards retrieved daily and uploaded by hand + +All existing projects are backfilled to "manual" (safe conservative default). + +Run once inside the Docker container: + docker exec terra-view python3 backend/migrate_add_project_data_collection_mode.py +""" +from pathlib import Path + +DB_PATH = Path("data/seismo_fleet.db") + + +def migrate(): + import sqlite3 + + if not DB_PATH.exists(): + print(f"Database not found at {DB_PATH}. Are you running from /home/serversdown/terra-view?") + return + + conn = sqlite3.connect(DB_PATH) + conn.row_factory = sqlite3.Row + cur = conn.cursor() + + # ── 1. 
Add column (idempotent) ─────────────────────────────────────────── + cur.execute("PRAGMA table_info(projects)") + existing_cols = {row["name"] for row in cur.fetchall()} + + if "data_collection_mode" not in existing_cols: + cur.execute("ALTER TABLE projects ADD COLUMN data_collection_mode TEXT DEFAULT 'manual'") + conn.commit() + print("✓ Added column data_collection_mode to projects") + else: + print("○ Column data_collection_mode already exists — skipping ALTER TABLE") + + # ── 2. Backfill NULLs to 'manual' ──────────────────────────────────────── + cur.execute("UPDATE projects SET data_collection_mode = 'manual' WHERE data_collection_mode IS NULL") + updated = cur.rowcount + conn.commit() + conn.close() + + if updated: + print(f"✓ Backfilled {updated} project(s) to data_collection_mode='manual'.") + print("Migration complete.") + + +if __name__ == "__main__": + migrate() diff --git a/backend/models.py b/backend/models.py index 2150d25..24738c4 100644 --- a/backend/models.py +++ b/backend/models.py @@ -157,6 +157,11 @@ class Project(Base): project_type_id = Column(String, nullable=False) # FK to ProjectType.id status = Column(String, default="active") # active, on_hold, completed, archived, deleted + # Data collection mode: how field data reaches Terra-View. 
+ # "remote" — units have modems; data pulled via FTP/scheduler automatically + # "manual" — no modem; SD cards retrieved daily and uploaded by hand + data_collection_mode = Column(String, default="manual") # remote | manual + # Project metadata client_name = Column(String, nullable=True, index=True) # Client name (e.g., "PJ Dick") site_address = Column(String, nullable=True) diff --git a/backend/routers/projects.py b/backend/routers/projects.py index 3f63f98..9de68d8 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -613,6 +613,7 @@ async def get_project(project_id: str, db: Session = Depends(get_db)): "site_coordinates": project.site_coordinates, "start_date": project.start_date.isoformat() if project.start_date else None, "end_date": project.end_date.isoformat() if project.end_date else None, + "data_collection_mode": project.data_collection_mode or "manual", "created_at": project.created_at.isoformat(), "updated_at": project.updated_at.isoformat(), } @@ -659,6 +660,8 @@ async def update_project( project.start_date = datetime.fromisoformat(data["start_date"]) if data["start_date"] else None if "end_date" in data: project.end_date = datetime.fromisoformat(data["end_date"]) if data["end_date"] else None + if "data_collection_mode" in data and data["data_collection_mode"] in ("remote", "manual"): + project.data_collection_mode = data["data_collection_mode"] project.updated_at = datetime.utcnow() diff --git a/templates/partials/project_create_modal.html b/templates/partials/project_create_modal.html index 3758542..bd34fad 100644 --- a/templates/partials/project_create_modal.html +++ b/templates/partials/project_create_modal.html @@ -75,6 +75,32 @@ Include this modal in pages that use the project picker.
+
+ +
+ + +
+
// Client-side filter applied before upload: keep only the files the server
// can import — measurement headers (.rnh) and Leq interval data (.rnd).
function _isWantedFile(f) {
    // Prefer the path inside the picked folder when the browser provides it.
    const n = (f.webkitRelativePath || f.name).toLowerCase();
    const base = n.split('/').pop();
    if (base.endsWith('.rnh')) return true;       // measurement header file
    if (base.endsWith('.rnd')) {
        if (base.includes('_leq_')) return true;  // NL-43 Leq
        if (base.startsWith('au2_')) return true; // AU2/NL-23 format
        if (!base.includes('_lp')) return true;   // unknown format — keep
    }
    // Everything else (including _Lp .rnd sound-pressure files) is skipped.
    return false;
}
document.getElementById('upload-all-input'); const status = document.getElementById('upload-all-status'); const resultsEl = document.getElementById('upload-all-results'); + const btn = document.getElementById('upload-all-btn'); + const cancelBtn = document.getElementById('upload-all-cancel-btn'); + const progressWrap = document.getElementById('upload-all-progress-wrap'); + const progressBar = document.getElementById('upload-all-progress-bar'); + const progressLabel = document.getElementById('upload-all-progress-label'); if (!input.files.length) { alert('Please select a folder to upload.'); return; } + // Filter client-side — only send Leq .rnd and .rnh files + const filesToSend = Array.from(input.files).filter(_isWantedFile); + if (!filesToSend.length) { + alert('No Leq .rnd or .rnh files found in selected folder.'); + return; + } + const formData = new FormData(); - for (const f of input.files) { - // webkitRelativePath gives the path relative to the selected folder root + for (const f of filesToSend) { formData.append('files', f); formData.append('paths', f.webkitRelativePath || f.name); } - status.textContent = `Uploading ${input.files.length} files\u2026`; - status.className = 'text-sm text-gray-500'; + // Disable controls and show progress + btn.disabled = true; + btn.textContent = 'Uploading\u2026'; + btn.classList.add('opacity-60', 'cursor-not-allowed'); + cancelBtn.disabled = true; + cancelBtn.classList.add('opacity-40', 'cursor-not-allowed'); + status.className = 'text-sm hidden'; resultsEl.classList.add('hidden'); + progressWrap.classList.remove('hidden'); + progressBar.style.width = '0%'; + progressLabel.textContent = `Uploading ${filesToSend.length} files\u2026`; - try { - const response = await fetch( - `/api/projects/{{ project_id }}/upload-all`, - { method: 'POST', body: formData } - ); - const data = await response.json(); + const xhr = new XMLHttpRequest(); - if (response.ok) { - const s = data.sessions_created; - const f = data.files_imported; - 
status.textContent = `\u2713 Imported ${f} file${f !== 1 ? 's' : ''} across ${s} session${s !== 1 ? 's' : ''}`; - status.className = 'text-sm text-green-600 dark:text-green-400'; - input.value = ''; + xhr.upload.addEventListener('progress', (e) => { + if (e.lengthComputable) { + const pct = Math.round((e.loaded / e.total) * 100); + progressBar.style.width = pct + '%'; + progressLabel.textContent = `Uploading ${filesToSend.length} files\u2026 ${pct}%`; + } + }); - // Build results summary - let html = ''; - if (data.sessions && data.sessions.length) { - html += '
Sessions created:
'; - html += '
    '; - for (const sess of data.sessions) { - html += `
  • \u2022 ${sess.location_name} — ${sess.files} files`; - if (sess.leq_files || sess.lp_files) html += ` (${sess.leq_files} Leq, ${sess.lp_files} Lp)`; - if (sess.store_name) html += ` — ${sess.store_name}`; - html += '
  • '; + xhr.upload.addEventListener('load', () => { + progressBar.style.width = '100%'; + progressLabel.textContent = 'Processing files on server\u2026'; + }); + + function _resetControls() { + progressWrap.classList.add('hidden'); + btn.disabled = false; + btn.textContent = 'Import'; + btn.classList.remove('opacity-60', 'cursor-not-allowed'); + cancelBtn.disabled = false; + cancelBtn.classList.remove('opacity-40', 'cursor-not-allowed'); + } + + xhr.addEventListener('load', () => { + _resetControls(); + try { + const data = JSON.parse(xhr.responseText); + if (xhr.status >= 200 && xhr.status < 300) { + const s = data.sessions_created; + const f = data.files_imported; + status.textContent = `\u2713 Imported ${f} file${f !== 1 ? 's' : ''} across ${s} session${s !== 1 ? 's' : ''}`; + status.className = 'text-sm text-green-600 dark:text-green-400'; + input.value = ''; + document.getElementById('upload-all-file-count').classList.add('hidden'); + + let html = ''; + if (data.sessions && data.sessions.length) { + html += '
    Sessions created:
    '; + html += '
      '; + for (const sess of data.sessions) { + html += `
    • \u2022 ${sess.location_name} — ${sess.files} files`; + if (sess.leq_files || sess.lp_files) html += ` (${sess.leq_files} Leq, ${sess.lp_files} Lp)`; + if (sess.store_name) html += ` — ${sess.store_name}`; + html += '
    • '; + } + html += '
    '; } - html += '
'; + if (data.unmatched_folders && data.unmatched_folders.length) { + html += `
\u26a0 Unmatched folders (no NRL location found): ${data.unmatched_folders.join(', ')}
`; + } + if (html) { + resultsEl.innerHTML = html; + resultsEl.classList.remove('hidden'); + } + htmx.trigger(document.getElementById('unified-files'), 'refresh'); + } else { + status.textContent = `Error: ${data.detail || 'Upload failed'}`; + status.className = 'text-sm text-red-600 dark:text-red-400'; } - if (data.unmatched_folders && data.unmatched_folders.length) { - html += `
\u26a0 Unmatched folders (no NRL location found): ${data.unmatched_folders.join(', ')}
`; - } - if (html) { - resultsEl.innerHTML = html; - resultsEl.classList.remove('hidden'); - } - - // Refresh the unified files view - htmx.trigger(document.getElementById('unified-files'), 'refresh'); - } else { - status.textContent = `Error: ${data.detail || 'Upload failed'}`; + } catch { + status.textContent = 'Error: Unexpected server response'; status.className = 'text-sm text-red-600 dark:text-red-400'; } - } catch (err) { - status.textContent = `Error: ${err.message}`; + }); + + xhr.addEventListener('error', () => { + _resetControls(); + status.textContent = 'Error: Network error during upload'; status.className = 'text-sm text-red-600 dark:text-red-400'; - } + }); + + xhr.open('POST', `/api/projects/{{ project_id }}/upload-all`); + xhr.send(formData); } // Load project details on page load and restore active tab from URL hash -- 2.49.1 From f89f04cd6f532e3365e33251657c1bb662c790c8 Mon Sep 17 00:00:00 2001 From: serversdown Date: Sat, 7 Mar 2026 00:16:58 +0000 Subject: [PATCH 28/31] feat: support for day time monitoring data, combined report generation now compaitible with mixed day and night types. --- backend/migrate_add_session_period_type.py | 131 +++++ backend/models.py | 8 + backend/routers/project_locations.py | 36 ++ backend/routers/projects.py | 430 +++++++++++----- templates/combined_report_wizard.html | 462 +++++++++--------- templates/partials/projects/session_list.html | 273 ++++++++--- 6 files changed, 941 insertions(+), 399 deletions(-) create mode 100644 backend/migrate_add_session_period_type.py diff --git a/backend/migrate_add_session_period_type.py b/backend/migrate_add_session_period_type.py new file mode 100644 index 0000000..386325b --- /dev/null +++ b/backend/migrate_add_session_period_type.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 +""" +Migration: Add session_label and period_type columns to monitoring_sessions. + +session_label - user-editable display name, e.g. 
"NRL-1 Sun 2/23 Night" +period_type - one of: weekday_day | weekday_night | weekend_day | weekend_night + Auto-derived from started_at when NULL. + +Period definitions (used in report stats table): + weekday_day Mon-Fri 07:00-22:00 -> Daytime (7AM-10PM) + weekday_night Mon-Fri 22:00-07:00 -> Nighttime (10PM-7AM) + weekend_day Sat-Sun 07:00-22:00 -> Daytime (7AM-10PM) + weekend_night Sat-Sun 22:00-07:00 -> Nighttime (10PM-7AM) + +Run once inside the Docker container: + docker exec terra-view python3 backend/migrate_add_session_period_type.py +""" +from pathlib import Path +from datetime import datetime + +DB_PATH = Path("data/seismo_fleet.db") + + +def _derive_period_type(started_at_str: str) -> str | None: + """Derive period_type from a started_at ISO datetime string.""" + if not started_at_str: + return None + try: + dt = datetime.fromisoformat(started_at_str) + except ValueError: + return None + is_weekend = dt.weekday() >= 5 # 5=Sat, 6=Sun + is_night = dt.hour >= 22 or dt.hour < 7 + if is_weekend: + return "weekend_night" if is_night else "weekend_day" + else: + return "weekday_night" if is_night else "weekday_day" + + +def _build_label(started_at_str: str, location_name: str | None, period_type: str | None) -> str | None: + """Build a human-readable session label.""" + if not started_at_str: + return None + try: + dt = datetime.fromisoformat(started_at_str) + except ValueError: + return None + + day_abbr = dt.strftime("%a") # Mon, Tue, Sun, etc. + date_str = dt.strftime("%-m/%-d") # 2/23 + + period_labels = { + "weekday_day": "Day", + "weekday_night": "Night", + "weekend_day": "Day", + "weekend_night": "Night", + } + period_str = period_labels.get(period_type or "", "") + + parts = [] + if location_name: + parts.append(location_name) + parts.append(f"{day_abbr} {date_str}") + if period_str: + parts.append(period_str) + return " — ".join(parts) + + +def migrate(): + import sqlite3 + + if not DB_PATH.exists(): + print(f"Database not found at {DB_PATH}. 
Are you running from /home/serversdown/terra-view?") + return + + conn = sqlite3.connect(DB_PATH) + conn.row_factory = sqlite3.Row + cur = conn.cursor() + + # 1. Add columns (idempotent) + cur.execute("PRAGMA table_info(monitoring_sessions)") + existing_cols = {row["name"] for row in cur.fetchall()} + + for col, typedef in [("session_label", "TEXT"), ("period_type", "TEXT")]: + if col not in existing_cols: + cur.execute(f"ALTER TABLE monitoring_sessions ADD COLUMN {col} {typedef}") + conn.commit() + print(f"✓ Added column {col} to monitoring_sessions") + else: + print(f"○ Column {col} already exists — skipping ALTER TABLE") + + # 2. Backfill existing rows + cur.execute( + """SELECT ms.id, ms.started_at, ms.location_id + FROM monitoring_sessions ms + WHERE ms.period_type IS NULL OR ms.session_label IS NULL""" + ) + sessions = cur.fetchall() + print(f"Backfilling {len(sessions)} session(s)...") + + updated = 0 + for row in sessions: + session_id = row["id"] + started_at = row["started_at"] + location_id = row["location_id"] + + # Look up location name + location_name = None + if location_id: + cur.execute("SELECT name FROM monitoring_locations WHERE id = ?", (location_id,)) + loc_row = cur.fetchone() + if loc_row: + location_name = loc_row["name"] + + period_type = _derive_period_type(started_at) + label = _build_label(started_at, location_name, period_type) + + cur.execute( + "UPDATE monitoring_sessions SET period_type = ?, session_label = ? 
WHERE id = ?", + (period_type, label, session_id), + ) + updated += 1 + + conn.commit() + conn.close() + print(f"✓ Backfilled {updated} session(s).") + print("Migration complete.") + + +if __name__ == "__main__": + migrate() diff --git a/backend/models.py b/backend/models.py index 24738c4..1c0a39d 100644 --- a/backend/models.py +++ b/backend/models.py @@ -272,6 +272,14 @@ class MonitoringSession(Base): duration_seconds = Column(Integer, nullable=True) status = Column(String, default="recording") # recording, completed, failed + # Human-readable label auto-derived from date/location, editable by user. + # e.g. "NRL-1 — Sun 2/23 — Night" + session_label = Column(String, nullable=True) + + # Period classification for report stats columns. + # weekday_day | weekday_night | weekend_day | weekend_night + period_type = Column(String, nullable=True) + # Snapshot of device configuration at recording time session_metadata = Column(Text, nullable=True) # JSON diff --git a/backend/routers/project_locations.py b/backend/routers/project_locations.py index adbc81a..45c1e4d 100644 --- a/backend/routers/project_locations.py +++ b/backend/routers/project_locations.py @@ -35,6 +35,37 @@ from backend.templates_config import templates router = APIRouter(prefix="/api/projects/{project_id}", tags=["project-locations"]) +# ============================================================================ +# Session period helpers +# ============================================================================ + +def _derive_period_type(dt: datetime) -> str: + """ + Classify a session start time into one of four period types. + Night = 22:00–07:00, Day = 07:00–22:00. + Weekend = Saturday (5) or Sunday (6). 
# Allowed values for MonitoringSession.period_type (see the
# migrate_add_session_period_type migration for the period definitions).
VALID_PERIOD_TYPES = {"weekday_day", "weekday_night", "weekend_day", "weekend_night"}

@router.patch("/{project_id}/sessions/{session_id}")
async def patch_session(
    project_id: str,
    session_id: str,
    data: dict,
    db: Session = Depends(get_db),
):
    """Update session_label and/or period_type on a monitoring session.

    Request body is a JSON object with optional keys:
        session_label: display name; blank/whitespace-only clears it (stored as NULL).
        period_type: one of VALID_PERIOD_TYPES, or a falsy value to clear it.

    Raises:
        HTTPException 404: session id does not exist.
        HTTPException 403: session belongs to a different project.
        HTTPException 400: period_type is non-empty and not in VALID_PERIOD_TYPES.
    """
    session = db.query(MonitoringSession).filter_by(id=session_id).first()
    if not session:
        raise HTTPException(status_code=404, detail="Session not found")
    # Guard against cross-project edits via a guessed/stale session id.
    if session.project_id != project_id:
        raise HTTPException(status_code=403, detail="Session does not belong to this project")

    if "session_label" in data:
        # Empty or whitespace-only labels are normalized to None (clears the label).
        session.session_label = str(data["session_label"]).strip() or None
    if "period_type" in data:
        pt = data["period_type"]
        if pt and pt not in VALID_PERIOD_TYPES:
            raise HTTPException(status_code=400, detail=f"Invalid period_type. Must be one of: {', '.join(sorted(VALID_PERIOD_TYPES))}")
        # Falsy values (None, "") clear the classification.
        session.period_type = pt or None

    db.commit()
    return JSONResponse({"status": "success", "session_label": session.session_label, "period_type": session.period_type})
file.file_path.lower().endswith('.rnd'): continue - from pathlib import Path as _Path abs_path = _Path("data") / file.file_path peek = _peek_rnd_headers(abs_path) - if not _is_leq_file(file.file_path, peek): - continue - location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None - loc_name = location.name if location else f"Session {session.id[:8]}" - location_file_counts[loc_name] = location_file_counts.get(loc_name, 0) + 1 + if _is_leq_file(file.file_path, peek): + has_leq = True + break + if not has_leq: + continue + + location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None + loc_name = location.name if location else f"Session {session.id[:8]}" + + if loc_name not in location_sessions: + location_sessions[loc_name] = [] + + # Build a display date and day-of-week from started_at + date_display = "" + day_of_week = "" + if session.started_at: + date_display = session.started_at.strftime("%-m/%-d/%Y") + day_of_week = session.started_at.strftime("%A") # Monday, Sunday, etc. 
+ + location_sessions[loc_name].append({ + "session_id": session.id, + "session_label": session.session_label or "", + "date_display": date_display, + "day_of_week": day_of_week, + "started_at": session.started_at.isoformat() if session.started_at else "", + "stopped_at": session.stopped_at.isoformat() if session.stopped_at else "", + "duration_h": (session.duration_seconds // 3600) if session.duration_seconds else 0, + "duration_m": ((session.duration_seconds % 3600) // 60) if session.duration_seconds else 0, + "period_type": session.period_type or "", + "status": session.status, + }) locations = [ - {"name": name, "file_count": count} - for name, count in sorted(location_file_counts.items()) + {"name": name, "sessions": sess_list} + for name, sess_list in sorted(location_sessions.items()) ] report_templates = db.query(ReportTemplate).all() @@ -3312,10 +3367,111 @@ async def combined_report_wizard( "project": project, "project_id": project_id, "locations": locations, + "locations_json": json.dumps(locations), "report_templates": report_templates, }) +def _build_location_data_from_sessions(project_id: str, db, selected_session_ids: list) -> dict: + """ + Build per-location spreadsheet data using an explicit list of session IDs. + Only rows from those sessions are included. Per-session period_type is + stored on each row so the report can filter stats correctly. + """ + from pathlib import Path as _Path + + project = db.query(Project).filter_by(id=project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + + if not selected_session_ids: + raise HTTPException(status_code=400, detail="No sessions selected.") + + # Load every requested session — one entry per (session_id, loc_name) pair. + # Keyed by session_id so overnight sessions are never split by calendar date. 
+ session_entries: dict = {} # session_id -> {loc_name, session_label, period_type, rows[]} + + for session_id in selected_session_ids: + session = db.query(MonitoringSession).filter_by(id=session_id, project_id=project_id).first() + if not session: + continue + location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None + loc_name = location.name if location else f"Session {session_id[:8]}" + + session_entries[session_id] = { + "loc_name": loc_name, + "session_label": session.session_label or "", + "period_type": session.period_type or "", + "started_at": session.started_at, + "rows": [], + } + + files = db.query(DataFile).filter_by(session_id=session_id).all() + for file in files: + if not file.file_path or not file.file_path.lower().endswith('.rnd'): + continue + abs_path = _Path("data") / file.file_path + peek = _peek_rnd_headers(abs_path) + if not _is_leq_file(file.file_path, peek): + continue + rows = _read_rnd_file_rows(file.file_path) + rows, _ = _normalize_rnd_rows(rows) + session_entries[session_id]["rows"].extend(rows) + + if not any(e["rows"] for e in session_entries.values()): + raise HTTPException(status_code=404, detail="No Leq data found in the selected sessions.") + + location_data = [] + for session_id in selected_session_ids: + entry = session_entries.get(session_id) + if not entry or not entry["rows"]: + continue + + loc_name = entry["loc_name"] + period_type = entry["period_type"] + raw_rows = sorted(entry["rows"], key=lambda r: r.get('Start Time', '')) + + spreadsheet_data = [] + for idx, row in enumerate(raw_rows, 1): + start_time_str = row.get('Start Time', '') + date_str = time_str = '' + if start_time_str: + try: + dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S') + date_str = dt.strftime('%Y-%m-%d') + time_str = dt.strftime('%H:%M') + except ValueError: + date_str = start_time_str + + lmax = row.get('Lmax(Main)', '') + ln1 = row.get('LN1(Main)', '') + ln2 = 
row.get('LN2(Main)', '') + + spreadsheet_data.append([ + idx, + date_str, + time_str, + lmax if lmax else '', + ln1 if ln1 else '', + ln2 if ln2 else '', + '', + period_type, # col index 7 — hidden, used by report gen for day/night bucketing + ]) + + location_data.append({ + "session_id": session_id, + "location_name": loc_name, + "session_label": entry["session_label"], + "period_type": period_type, + "started_at": entry["started_at"].isoformat() if entry["started_at"] else "", + "raw_count": len(raw_rows), + "filtered_count": len(raw_rows), + "spreadsheet_data": spreadsheet_data, + }) + + return {"project": project, "location_data": location_data} + + @router.get("/{project_id}/combined-report-preview", response_class=HTMLResponse) async def combined_report_preview( request: Request, @@ -3323,38 +3479,19 @@ async def combined_report_preview( report_title: str = Query("Background Noise Study"), project_name: str = Query(""), client_name: str = Query(""), - start_time: str = Query(""), - end_time: str = Query(""), - start_date: str = Query(""), - end_date: str = Query(""), - enabled_locations: str = Query(""), + selected_sessions: str = Query(""), # comma-separated session IDs db: Session = Depends(get_db), ): """Preview and edit combined report data before generating the Excel file.""" - enabled_list = [loc.strip() for loc in enabled_locations.split(',') if loc.strip()] if enabled_locations else None + session_ids = [s.strip() for s in selected_sessions.split(',') if s.strip()] if selected_sessions else [] - result = _build_combined_location_data( - project_id, db, - start_time=start_time, - end_time=end_time, - start_date=start_date, - end_date=end_date, - enabled_locations=enabled_list, - ) + result = _build_location_data_from_sessions(project_id, db, session_ids) project = result["project"] location_data = result["location_data"] - total_rows = sum(loc["filtered_count"] for loc in location_data) final_project_name = project_name if project_name else 
project.name - # Build time filter display string - time_filter_desc = "" - if start_time and end_time: - time_filter_desc = f"{start_time} – {end_time}" - elif start_time or end_time: - time_filter_desc = f"{start_time or ''} – {end_time or ''}" - return templates.TemplateResponse("combined_report_preview.html", { "request": request, "project": project, @@ -3362,11 +3499,7 @@ async def combined_report_preview( "report_title": report_title, "project_name": final_project_name, "client_name": client_name, - "start_time": start_time, - "end_time": end_time, - "start_date": start_date, - "end_date": end_date, - "time_filter_desc": time_filter_desc, + "time_filter_desc": f"{len(session_ids)} session{'s' if len(session_ids) != 1 else ''} selected", "location_data": location_data, "locations_json": json.dumps(location_data), "total_rows": total_rows, @@ -3474,13 +3607,14 @@ async def generate_combined_from_preview( b_inner = last_inner if is_last else data_inner b_right = last_right if is_last else data_right - test_num = row[0] if len(row) > 0 else row_idx + 1 - date_val = row[1] if len(row) > 1 else '' - time_val = row[2] if len(row) > 2 else '' - lmax = row[3] if len(row) > 3 else '' - ln1 = row[4] if len(row) > 4 else '' - ln2 = row[5] if len(row) > 5 else '' - comment = row[6] if len(row) > 6 else '' + test_num = row[0] if len(row) > 0 else row_idx + 1 + date_val = row[1] if len(row) > 1 else '' + time_val = row[2] if len(row) > 2 else '' + lmax = row[3] if len(row) > 3 else '' + ln1 = row[4] if len(row) > 4 else '' + ln2 = row[5] if len(row) > 5 else '' + comment = row[6] if len(row) > 6 else '' + row_period = row[7] if len(row) > 7 else '' # hidden period_type from session c = ws.cell(row=dr, column=1, value=test_num) c.font = f_data; c.alignment = center_a; c.border = b_left @@ -3505,15 +3639,8 @@ async def generate_combined_from_preview( if isinstance(ln2, (int, float)): ln2_vals.append(ln2) - if time_val and isinstance(lmax, (int, float)) and isinstance(ln1, 
(int, float)) and isinstance(ln2, (int, float)): - try: - try: - row_dt = datetime.strptime(str(time_val), '%H:%M') - except ValueError: - row_dt = datetime.strptime(str(time_val), '%H:%M:%S') - parsed_rows.append((row_dt, float(lmax), float(ln1), float(ln2))) - except (ValueError, TypeError): - pass + if isinstance(lmax, (int, float)) and isinstance(ln1, (int, float)) and isinstance(ln2, (int, float)): + parsed_rows.append((row_period, float(lmax), float(ln1), float(ln2))) data_end_row = data_start_row + len(day_rows) - 1 @@ -3548,44 +3675,109 @@ async def generate_combined_from_preview( ws.merge_cells(start_row=29, start_column=9, end_row=29, end_column=14) hdr_fill_tbl = PatternFill(start_color="F2F2F2", end_color="F2F2F2", fill_type="solid") - c = ws.cell(row=31, column=9, value=""); c.border = tbl_top_left; c.font = f_bold - c = ws.cell(row=31, column=10, value="Evening (7PM to 10PM)") - c.font = f_bold; c.alignment = Alignment(horizontal='center', vertical='center', wrap_text=True) - c.border = tbl_top_mid; c.fill = hdr_fill_tbl - ws.merge_cells(start_row=31, start_column=10, end_row=31, end_column=11) - c = ws.cell(row=31, column=12, value="Nighttime (10PM to 7AM)") - c.font = f_bold; c.alignment = Alignment(horizontal='center', vertical='center', wrap_text=True) - c.border = tbl_top_right; c.fill = hdr_fill_tbl - ws.merge_cells(start_row=31, start_column=12, end_row=31, end_column=13) - ws.row_dimensions[31].height = 15 - - evening = [(lmx, l1, l2) for dt, lmx, l1, l2 in parsed_rows if 19 <= dt.hour < 22] - nighttime = [(lmx, l1, l2) for dt, lmx, l1, l2 in parsed_rows if dt.hour >= 22 or dt.hour < 7] def _avg(vals): return round(sum(vals) / len(vals), 1) if vals else None def _max(vals): return round(max(vals), 1) if vals else None - def write_stat(row_num, label, eve_val, night_val, is_last=False): + # --- Dynamic period detection ---------------------------------------- + # Use the period_type stored on each row (from the session record). 
+ # Rows without a period_type fall back to time-of-day detection. + # The four canonical types map to two display columns: + # Day -> "Daytime (7AM to 10PM)" + # Night -> "Nighttime (10PM to 7AM)" + PERIOD_TYPE_IS_DAY = {"weekday_day", "weekend_day"} + PERIOD_TYPE_IS_NIGHT = {"weekday_night", "weekend_night"} + + day_rows_data = [] + night_rows_data = [] + for pt, lmx, l1, l2 in parsed_rows: + if pt in PERIOD_TYPE_IS_DAY: + day_rows_data.append((lmx, l1, l2)) + elif pt in PERIOD_TYPE_IS_NIGHT: + night_rows_data.append((lmx, l1, l2)) + else: + # No period_type — fall back to time-of-day (shouldn't happen for + # new uploads, but handles legacy data gracefully) + # We can't derive from time here since parsed_rows no longer stores dt. + # Put in day as a safe default. + day_rows_data.append((lmx, l1, l2)) + + all_candidate_periods = [ + ("Daytime (7AM to 10PM)", day_rows_data), + ("Nighttime (10PM to 7AM)", night_rows_data), + ] + active_periods = [(label, rows) for label, rows in all_candidate_periods if rows] + + # If nothing at all, show both columns empty + if not active_periods: + active_periods = [("Daytime (7AM to 10PM)", []), ("Nighttime (10PM to 7AM)", [])] + + # Build header row (row 31) with one merged pair of columns per active period + # Layout: col 9 = row label, then pairs: (10,11), (12,13), (14,15) + num_periods = len(active_periods) + period_start_cols = [10 + i * 2 for i in range(num_periods)] + + # Left/right border helpers for the header row + def _hdr_border(i, n): + is_first = (i == 0) + is_last = (i == n - 1) + return Border( + left=med if is_first else thin, + right=med if is_last else thin, + top=med, + bottom=thin, + ) + def _mid_border(i, n, is_data_last=False): + is_first = (i == 0) + is_last = (i == n - 1) + b = tbl_bot_mid if is_data_last else tbl_mid_mid + return Border( + left=med if is_first else thin, + right=med if is_last else thin, + top=b.top, + bottom=b.bottom, + ) + + c = ws.cell(row=31, column=9, value=""); c.border = 
tbl_top_left; c.font = f_bold + ws.row_dimensions[31].height = 30 + + for i, (period_label, _) in enumerate(active_periods): + sc = period_start_cols[i] + is_last_col = (i == num_periods - 1) + c = ws.cell(row=31, column=sc, value=period_label.replace('\n', ' ')) + c.font = f_bold + c.alignment = Alignment(horizontal='center', vertical='center', wrap_text=True) + c.border = _hdr_border(i, num_periods) + c.fill = hdr_fill_tbl + ws.merge_cells(start_row=31, start_column=sc, end_row=31, end_column=sc + 1) + + def write_stat_dynamic(row_num, row_label, period_vals_list, is_last=False): bl = tbl_bot_left if is_last else tbl_mid_left - bm = tbl_bot_mid if is_last else tbl_mid_mid - br = tbl_bot_right if is_last else tbl_mid_right - lbl = ws.cell(row=row_num, column=9, value=label) + lbl = ws.cell(row=row_num, column=9, value=row_label) lbl.font = f_data; lbl.border = bl lbl.alignment = Alignment(horizontal='left', vertical='center') - ev_str = f"{eve_val} dBA" if eve_val is not None else "" - ev = ws.cell(row=row_num, column=10, value=ev_str) - ev.font = f_bold; ev.border = bm - ev.alignment = Alignment(horizontal='center', vertical='center') - ws.merge_cells(start_row=row_num, start_column=10, end_row=row_num, end_column=11) - ni_str = f"{night_val} dBA" if night_val is not None else "" - ni = ws.cell(row=row_num, column=12, value=ni_str) - ni.font = f_bold; ni.border = br - ni.alignment = Alignment(horizontal='center', vertical='center') - ws.merge_cells(start_row=row_num, start_column=12, end_row=row_num, end_column=13) + n = len(period_vals_list) + for i, val in enumerate(period_vals_list): + sc = period_start_cols[i] + is_last_col = (i == n - 1) + val_str = f"{val} dBA" if val is not None else "" + c = ws.cell(row=row_num, column=sc, value=val_str) + c.font = f_bold + c.alignment = Alignment(horizontal='center', vertical='center') + c.border = Border( + left=med if i == 0 else thin, + right=med if is_last_col else thin, + top=tbl_bot_mid.top if is_last else 
tbl_mid_mid.top, + bottom=tbl_bot_mid.bottom if is_last else tbl_mid_mid.bottom, + ) + ws.merge_cells(start_row=row_num, start_column=sc, end_row=row_num, end_column=sc + 1) - write_stat(32, "LAmax", _max([v[0] for v in evening]), _max([v[0] for v in nighttime])) - write_stat(33, "LA01 Average", _avg([v[1] for v in evening]), _avg([v[1] for v in nighttime])) - write_stat(34, "LA10 Average", _avg([v[2] for v in evening]), _avg([v[2] for v in nighttime]), is_last=True) + write_stat_dynamic(32, "LAmax", + [_max([v[0] for v in rows]) for _, rows in active_periods]) + write_stat_dynamic(33, "LA01 Average", + [_avg([v[1] for v in rows]) for _, rows in active_periods]) + write_stat_dynamic(34, "LA10 Average", + [_avg([v[2] for v in rows]) for _, rows in active_periods], is_last=True) ws.sheet_properties.pageSetUpPr = PageSetupProperties(fitToPage=False) ws.page_setup.orientation = 'portrait' @@ -3624,58 +3816,58 @@ async def generate_combined_from_preview( summary_ws.cell(row=idx, column=5, value=s['ln2_avg'] or '-').border = thin_border # ---------------------------------------------------------------- - # Split each location's rows by date, collect all unique dates + # Build one workbook per session (each location entry is one session) # ---------------------------------------------------------------- - # Structure: dates_map[date_str][loc_name] = [row, ...] 
- dates_map: dict = {} - for loc_info in locations: - loc_name = loc_info.get("location_name", "Unknown") - rows = loc_info.get("spreadsheet_data", []) - for row in rows: - date_val = str(row[1]).strip() if len(row) > 1 else '' - if not date_val: - date_val = "Unknown Date" - dates_map.setdefault(date_val, {}).setdefault(loc_name, []).append(row) + if not locations: + raise HTTPException(status_code=400, detail="No location data provided") - if not dates_map: - raise HTTPException(status_code=400, detail="No data rows found in provided location data") - - sorted_dates = sorted(dates_map.keys()) project_name_clean = "".join(c for c in project_name if c.isalnum() or c in ('_', '-', ' ')).strip().replace(' ', '_') + final_title = f"{report_title} - {project_name}" - # ---------------------------------------------------------------- - # Build one workbook per day, zip them - # ---------------------------------------------------------------- zip_buffer = io.BytesIO() with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf: - for date_str in sorted_dates: - loc_data_for_day = dates_map[date_str] - final_title = f"{report_title} - {project_name}" + for loc_info in locations: + loc_name = loc_info.get("location_name", "Unknown") + session_label = loc_info.get("session_label", "") + period_type = loc_info.get("period_type", "") + started_at_str = loc_info.get("started_at", "") + rows = loc_info.get("spreadsheet_data", []) + if not rows: + continue + + # Re-number interval # sequentially + for i, row in enumerate(rows): + if len(row) > 0: + row[0] = i + 1 wb = openpyxl.Workbook() wb.remove(wb.active) - loc_summaries = [] - for loc_name in sorted(loc_data_for_day.keys()): - day_rows = loc_data_for_day[loc_name] - # Re-number interval # sequentially for this day - for i, row in enumerate(day_rows): - if len(row) > 0: - row[0] = i + 1 + safe_sheet = "".join(c for c in loc_name if c.isalnum() or c in (' ', '-', '_'))[:31] + ws = wb.create_sheet(title=safe_sheet) + 
summary = _build_location_sheet(ws, loc_name, rows, final_title) - safe_name = "".join(c for c in loc_name if c.isalnum() or c in (' ', '-', '_'))[:31] - ws = wb.create_sheet(title=safe_name) - summary = _build_location_sheet(ws, loc_name, day_rows, final_title) - loc_summaries.append(summary) + # Derive a date label for the summary sheet from started_at or first row + day_label = session_label or loc_name + if started_at_str: + try: + _dt = datetime.fromisoformat(started_at_str) + day_label = _dt.strftime('%-m/%-d/%Y') + if session_label: + day_label = session_label + except Exception: + pass - _build_summary_sheet(wb, date_str, project_name, loc_summaries) + _build_summary_sheet(wb, day_label, project_name, [summary]) xlsx_buf = io.BytesIO() wb.save(xlsx_buf) xlsx_buf.seek(0) - date_clean = date_str.replace('/', '-').replace(' ', '_') - xlsx_name = f"{project_name_clean}_{date_clean}_report.xlsx" + # Build a clean filename from label or location+date + label_clean = session_label or loc_name + label_clean = "".join(c for c in label_clean if c.isalnum() or c in (' ', '-', '_', '/')).strip().replace(' ', '_').replace('/', '-') + xlsx_name = f"{project_name_clean}_{label_clean}_report.xlsx" zf.writestr(xlsx_name, xlsx_buf.read()) zip_buffer.seek(0) diff --git a/templates/combined_report_wizard.html b/templates/combined_report_wizard.html index 8470ae9..a6c213f 100644 --- a/templates/combined_report_wizard.html +++ b/templates/combined_report_wizard.html @@ -74,105 +74,134 @@ - +
-

Time Filter

-

Applied to all locations. Leave blank to include all data.

- - -
- - - - -
- - -
-
- - -
-
- - -
-
- - -
- -
-
- - -
-
- - -
-
-
-
- - -
-
-
-

Locations to Include

-

- {{ locations|length }} of {{ locations|length }} selected -

-
+
+

Monitoring Sessions

- - + +
+

+ 0 session(s) selected — each selected session becomes one sheet in the ZIP. + Change the period type per session to control how stats are bucketed (Day vs Night). +

{% if locations %} -
{% for loc in locations %} - + {% set loc_name = loc.name %} + {% set sessions = loc.sessions %} +
+ + + +
+ + + +
+ {% for s in sessions %} + {% set pt_colors = { + 'weekday_day': 'bg-blue-100 text-blue-800 dark:bg-blue-900/30 dark:text-blue-300', + 'weekday_night': 'bg-indigo-100 text-indigo-800 dark:bg-indigo-900/30 dark:text-indigo-300', + 'weekend_day': 'bg-amber-100 text-amber-800 dark:bg-amber-900/30 dark:text-amber-300', + 'weekend_night': 'bg-purple-100 text-purple-800 dark:bg-purple-900/30 dark:text-purple-300', + } %} + {% set pt_labels = { + 'weekday_day': 'Weekday Day', + 'weekday_night': 'Weekday Night', + 'weekend_day': 'Weekend Day', + 'weekend_night': 'Weekend Night', + } %} +
+ + + + +
+
+ + {{ s.day_of_week }} {{ s.date_display }} + + {% if s.session_label %} + {{ s.session_label }} + {% endif %} + {% if s.status == 'recording' %} + + Recording + + {% endif %} +
+
+ {% if s.started_at %} + {{ s.started_at }} + {% endif %} + {% if s.duration_h is not none %} + {{ s.duration_h }}h {{ s.duration_m }}m + {% endif %} +
+
+ + +
+ + +
+
+ {% endfor %} +
+
{% endfor %} -
{% else %} -
-

No Leq measurement files found in this project.

-

Upload RND files with '_Leq_' in the filename to generate reports.

+
+ + + +

No monitoring sessions found.

+

Upload data files to create sessions first.

{% endif %}
- Cancel @@ -191,180 +220,173 @@
diff --git a/templates/partials/projects/session_list.html b/templates/partials/projects/session_list.html index 957f431..6b8b617 100644 --- a/templates/partials/projects/session_list.html +++ b/templates/partials/projects/session_list.html @@ -1,79 +1,149 @@ {% if sessions %} -
+
{% for item in sessions %} -
-
+ {% set s = item.session %} + {% set loc = item.location %} + {% set unit = item.unit %} + + {# Period display maps #} + {% set period_labels = { + 'weekday_day': 'Weekday Day', + 'weekday_night': 'Weekday Night', + 'weekend_day': 'Weekend Day', + 'weekend_night': 'Weekend Night', + } %} + {% set period_colors = { + 'weekday_day': 'bg-blue-100 text-blue-800 dark:bg-blue-900/30 dark:text-blue-300', + 'weekday_night': 'bg-indigo-100 text-indigo-800 dark:bg-indigo-900/30 dark:text-indigo-300', + 'weekend_day': 'bg-amber-100 text-amber-800 dark:bg-amber-900/30 dark:text-amber-300', + 'weekend_night': 'bg-purple-100 text-purple-800 dark:bg-purple-900/30 dark:text-purple-300', + } %} + +
+
-
-

- Session {{ item.session.id[:8] }}... -

- {% if item.session.status == 'recording' %} - - - Recording - - {% elif item.session.status == 'completed' %} - - Completed - - {% elif item.session.status == 'paused' %} - - Paused - - {% elif item.session.status == 'failed' %} - - Failed + + +
+ + {{ s.session_label or ('Session ' + s.id[:8] + '…') }} + + + + {% if s.status == 'recording' %} + + Recording + {% elif s.status == 'completed' %} + Completed + {% elif s.status == 'failed' %} + Failed {% endif %} + + +
+ + +
-
- {% if item.unit %} -
- Unit: - - {{ item.unit.id }} - + +
+ {% if loc %} +
+ + + + + {{ loc.name }}
{% endif %} -
- Started: - {{ item.session.started_at|local_datetime if item.session.started_at else 'N/A' }} -
- - {% if item.session.stopped_at %} -
- Ended: - {{ item.session.stopped_at|local_datetime }} + {% if s.started_at %} +
+ + + + {{ s.started_at|local_datetime }}
{% endif %} - {% if item.session.duration_seconds %} -
- Duration: - {{ (item.session.duration_seconds // 3600) }}h {{ ((item.session.duration_seconds % 3600) // 60) }}m + {% if s.stopped_at %} +
+ + + + Ended {{ s.stopped_at|local_datetime }} +
+ {% endif %} + + {% if s.duration_seconds %} +
+ + + + {{ (s.duration_seconds // 3600) }}h {{ ((s.duration_seconds % 3600) // 60) }}m +
+ {% endif %} + + {% if unit %} +
+ + + + {{ unit.id }} +
+ {% endif %} + + {% if s.device_model %} +
+ + + + {{ s.device_model }}
{% endif %}
- {% if item.session.notes %} -

- {{ item.session.notes }} -

+ {% if s.notes %} +

{{ s.notes }}

{% endif %}
-
- {% if item.session.status == 'recording' %} - +
+ {% if s.status == 'recording' %} + {% endif %} - @@ -84,24 +154,107 @@
{% else %}
- + -

No monitoring sessions yet

-

Schedule a session to get started

+

No monitoring sessions yet

+

Upload data to create sessions

{% endif %} -- 2.49.1 From 86010de60c0033df150abc6d2bd33807ce5bb1cb Mon Sep 17 00:00:00 2001 From: serversdown Date: Sat, 7 Mar 2026 01:32:49 +0000 Subject: [PATCH 29/31] Fix: combined report generation formatting fixed and cleaned up. (i think its good now?) --- backend/routers/project_locations.py | 4 +- backend/routers/projects.py | 209 ++++++++++++++++--------- templates/combined_report_preview.html | 17 +- 3 files changed, 151 insertions(+), 79 deletions(-) diff --git a/backend/routers/project_locations.py b/backend/routers/project_locations.py index 45c1e4d..44fcdd5 100644 --- a/backend/routers/project_locations.py +++ b/backend/routers/project_locations.py @@ -53,7 +53,9 @@ def _derive_period_type(dt: datetime) -> str: def _build_session_label(dt: datetime, location_name: str, period_type: str) -> str: - """Build a human-readable session label, e.g. 'NRL-1 — Sun 2/23 — Night'.""" + """Build a human-readable session label, e.g. 'NRL-1 — Sun 2/23 — Night'. + Uses started_at date as-is; user can correct period_type in the wizard. 
+ """ day_abbr = dt.strftime("%a") date_str = f"{dt.month}/{dt.day}" period_str = { diff --git a/backend/routers/projects.py b/backend/routers/projects.py index 3bf23e9..5349788 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -3431,17 +3431,47 @@ def _build_location_data_from_sessions(project_id: str, db, selected_session_ids period_type = entry["period_type"] raw_rows = sorted(entry["rows"], key=lambda r: r.get('Start Time', '')) - spreadsheet_data = [] - for idx, row in enumerate(raw_rows, 1): + # Parse all rows to datetimes first so we can apply period-aware filtering + parsed = [] + for row in raw_rows: start_time_str = row.get('Start Time', '') - date_str = time_str = '' + dt = None if start_time_str: try: dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S') - date_str = dt.strftime('%Y-%m-%d') - time_str = dt.strftime('%H:%M') except ValueError: - date_str = start_time_str + pass + parsed.append((dt, row)) + + # Determine which rows to keep based on period_type + is_day_session = period_type in ('weekday_day', 'weekend_day') + target_date = None + if is_day_session: + # Day: 07:00–18:59 only, restricted to the LAST calendar date that has daytime rows + daytime_dates = sorted({ + dt.date() for dt, row in parsed + if dt and 7 <= dt.hour < 19 + }) + target_date = daytime_dates[-1] if daytime_dates else None + filtered = [ + (dt, row) for dt, row in parsed + if dt and dt.date() == target_date and 7 <= dt.hour < 19 + ] + else: + # Night: 19:00–06:59, spanning both calendar days — no date restriction + filtered = [ + (dt, row) for dt, row in parsed + if dt and (dt.hour >= 19 or dt.hour < 7) + ] + + # Fall back to all rows if filtering removed everything + if not filtered: + filtered = parsed + + spreadsheet_data = [] + for idx, (dt, row) in enumerate(filtered, 1): + date_str = dt.strftime('%Y-%m-%d') if dt else '' + time_str = dt.strftime('%H:%M') if dt else '' lmax = row.get('Lmax(Main)', '') ln1 = row.get('LN1(Main)', '') @@ 
-3458,14 +3488,33 @@ def _build_location_data_from_sessions(project_id: str, db, selected_session_ids period_type, # col index 7 — hidden, used by report gen for day/night bucketing ]) + # For the label/filename, use target_date (day sessions) or started_at (night sessions) + from datetime import timedelta as _td + started_at_dt = entry["started_at"] + if is_day_session and target_date: + # Use the actual target date from data filtering (last date with daytime rows) + label_dt = datetime.combine(target_date, datetime.min.time()) + else: + label_dt = started_at_dt + + # Rebuild session label using the correct label date + if label_dt and entry["loc_name"]: + period_str = {"weekday_day": "Day", "weekday_night": "Night", + "weekend_day": "Day", "weekend_night": "Night"}.get(period_type, "") + day_abbr = label_dt.strftime("%a") + date_label = f"{label_dt.month}/{label_dt.day}" + session_label = " — ".join(p for p in [loc_name, f"{day_abbr} {date_label}", period_str] if p) + else: + session_label = entry["session_label"] + location_data.append({ "session_id": session_id, "location_name": loc_name, - "session_label": entry["session_label"], + "session_label": session_label, "period_type": period_type, - "started_at": entry["started_at"].isoformat() if entry["started_at"] else "", + "started_at": label_dt.isoformat() if label_dt else "", "raw_count": len(raw_rows), - "filtered_count": len(raw_rows), + "filtered_count": len(filtered), "spreadsheet_data": spreadsheet_data, }) @@ -3569,7 +3618,7 @@ async def generate_combined_from_preview( tbl_bot_mid = Border(left=thin, right=thin, top=thin, bottom=med) tbl_bot_right = Border(left=thin, right=med, top=thin, bottom=med) - col_widths = [9.43, 10.14, 8.14, 12.86, 10.86, 10.86, 25.0, 6.43, 12.43, 12.43, 10.0, 14.71, 8.0, 6.43, 6.43, 6.43] + col_widths = [9.43, 10.14, 8.14, 12.86, 10.86, 10.86, 25.0, 6.43, 18.0, 18.0, 14.0, 14.0, 10.0, 8.0, 6.43, 6.43] def _build_location_sheet(ws, loc_name, day_rows, final_title): """Write one 
location's data onto ws. day_rows is a list of spreadsheet row arrays.""" @@ -3586,6 +3635,28 @@ async def generate_combined_from_preview( ws['A3'] = loc_name ws['A3'].font = f_title; ws['A3'].alignment = center_a ws.row_dimensions[3].height = 15.75 + + # Row 4: date range derived from the data rows + def _fmt_date(d): + try: + from datetime import datetime as _dt + return _dt.strptime(d, '%Y-%m-%d').strftime('%-m/%-d/%y') + except Exception: + return d + + dates_in_data = sorted({ + row[1] for row in day_rows + if len(row) > 1 and row[1] + }) + if len(dates_in_data) >= 2: + date_label = f"{_fmt_date(dates_in_data[0])} to {_fmt_date(dates_in_data[-1])}" + elif len(dates_in_data) == 1: + date_label = _fmt_date(dates_in_data[0]) + else: + date_label = "" + ws.merge_cells('A4:G4') + ws['A4'] = date_label + ws['A4'].font = f_data; ws['A4'].alignment = center_a ws.row_dimensions[4].height = 15 ws.row_dimensions[5].height = 15.75 @@ -3608,7 +3679,7 @@ async def generate_combined_from_preview( b_right = last_right if is_last else data_right test_num = row[0] if len(row) > 0 else row_idx + 1 - date_val = row[1] if len(row) > 1 else '' + date_val = _fmt_date(row[1]) if len(row) > 1 and row[1] else '' time_val = row[2] if len(row) > 2 else '' lmax = row[3] if len(row) > 3 else '' ln1 = row[4] if len(row) > 4 else '' @@ -3640,7 +3711,7 @@ async def generate_combined_from_preview( ln2_vals.append(ln2) if isinstance(lmax, (int, float)) and isinstance(ln1, (int, float)) and isinstance(ln2, (int, float)): - parsed_rows.append((row_period, float(lmax), float(ln1), float(ln2))) + parsed_rows.append((row_period, time_val, float(lmax), float(ln1), float(ln2))) data_end_row = data_start_row + len(day_rows) - 1 @@ -3667,116 +3738,112 @@ async def generate_combined_from_preview( ws.add_chart(chart, "H4") - note1 = ws.cell(row=28, column=9, value="Note: Averages are calculated by determining the arithmetic average ") - note1.font = f_data; note1.alignment = left_a - 
ws.merge_cells(start_row=28, start_column=9, end_row=28, end_column=14) - note2 = ws.cell(row=29, column=9, value="for each specified range of time intervals.") - note2.font = f_data; note2.alignment = left_a - ws.merge_cells(start_row=29, start_column=9, end_row=29, end_column=14) - hdr_fill_tbl = PatternFill(start_color="F2F2F2", end_color="F2F2F2", fill_type="solid") def _avg(vals): return round(sum(vals) / len(vals), 1) if vals else None def _max(vals): return round(max(vals), 1) if vals else None - # --- Dynamic period detection ---------------------------------------- - # Use the period_type stored on each row (from the session record). - # Rows without a period_type fall back to time-of-day detection. - # The four canonical types map to two display columns: - # Day -> "Daytime (7AM to 10PM)" - # Night -> "Nighttime (10PM to 7AM)" - PERIOD_TYPE_IS_DAY = {"weekday_day", "weekend_day"} + # --- Period bucketing ------------------------------------------------ + # For night sessions: split into Evening (7PM–10PM) and Nighttime (10PM–7AM). + # For day sessions: single Daytime bucket. 
+ PERIOD_TYPE_IS_DAY = {"weekday_day", "weekend_day"} PERIOD_TYPE_IS_NIGHT = {"weekday_night", "weekend_night"} - day_rows_data = [] - night_rows_data = [] - for pt, lmx, l1, l2 in parsed_rows: + day_rows_data = [] + evening_rows_data = [] + night_rows_data = [] + + for pt, time_v, lmx, l1, l2 in parsed_rows: if pt in PERIOD_TYPE_IS_DAY: day_rows_data.append((lmx, l1, l2)) elif pt in PERIOD_TYPE_IS_NIGHT: - night_rows_data.append((lmx, l1, l2)) + # Split by time: Evening = 19:00–21:59, Nighttime = 22:00–06:59 + hour = 0 + if time_v and ':' in str(time_v): + try: + hour = int(str(time_v).split(':')[0]) + except ValueError: + pass + if 19 <= hour <= 21: + evening_rows_data.append((lmx, l1, l2)) + else: + night_rows_data.append((lmx, l1, l2)) else: - # No period_type — fall back to time-of-day (shouldn't happen for - # new uploads, but handles legacy data gracefully) - # We can't derive from time here since parsed_rows no longer stores dt. - # Put in day as a safe default. day_rows_data.append((lmx, l1, l2)) all_candidate_periods = [ ("Daytime (7AM to 10PM)", day_rows_data), + ("Evening (7PM to 10PM)", evening_rows_data), ("Nighttime (10PM to 7AM)", night_rows_data), ] active_periods = [(label, rows) for label, rows in all_candidate_periods if rows] - - # If nothing at all, show both columns empty if not active_periods: - active_periods = [("Daytime (7AM to 10PM)", []), ("Nighttime (10PM to 7AM)", [])] + active_periods = [("Daytime (7AM to 10PM)", [])] + + # --- Stats table — fixed position alongside the chart --- + note1 = ws.cell(row=28, column=9, + value="Note: Averages are calculated by determining the arithmetic average ") + note1.font = f_data; note1.alignment = left_a + ws.merge_cells(start_row=28, start_column=9, end_row=28, end_column=14) + note2 = ws.cell(row=29, column=9, + value="for each specified range of time intervals.") + note2.font = f_data; note2.alignment = left_a + ws.merge_cells(start_row=29, start_column=9, end_row=29, end_column=14) + + for r 
in [28, 29, 30, 31, 32, 33, 34]: + ws.row_dimensions[r].height = 15 + + tbl_hdr_row = 31 + tbl_data_row = 32 - # Build header row (row 31) with one merged pair of columns per active period # Layout: col 9 = row label, then pairs: (10,11), (12,13), (14,15) num_periods = len(active_periods) period_start_cols = [10 + i * 2 for i in range(num_periods)] - # Left/right border helpers for the header row def _hdr_border(i, n): - is_first = (i == 0) - is_last = (i == n - 1) return Border( - left=med if is_first else thin, - right=med if is_last else thin, - top=med, - bottom=thin, - ) - def _mid_border(i, n, is_data_last=False): - is_first = (i == 0) - is_last = (i == n - 1) - b = tbl_bot_mid if is_data_last else tbl_mid_mid - return Border( - left=med if is_first else thin, - right=med if is_last else thin, - top=b.top, - bottom=b.bottom, + left=med if i == 0 else thin, + right=med if i == n - 1 else thin, + top=med, bottom=thin, ) - c = ws.cell(row=31, column=9, value=""); c.border = tbl_top_left; c.font = f_bold - ws.row_dimensions[31].height = 30 + c = ws.cell(row=tbl_hdr_row, column=9, value=""); c.border = tbl_top_left; c.font = f_bold for i, (period_label, _) in enumerate(active_periods): sc = period_start_cols[i] - is_last_col = (i == num_periods - 1) - c = ws.cell(row=31, column=sc, value=period_label.replace('\n', ' ')) + c = ws.cell(row=tbl_hdr_row, column=sc, value=period_label) c.font = f_bold - c.alignment = Alignment(horizontal='center', vertical='center', wrap_text=True) + c.alignment = Alignment(horizontal='center', vertical='center', wrap_text=False) c.border = _hdr_border(i, num_periods) c.fill = hdr_fill_tbl - ws.merge_cells(start_row=31, start_column=sc, end_row=31, end_column=sc + 1) + ws.merge_cells(start_row=tbl_hdr_row, start_column=sc, + end_row=tbl_hdr_row, end_column=sc + 1) def write_stat_dynamic(row_num, row_label, period_vals_list, is_last=False): - bl = tbl_bot_left if is_last else tbl_mid_left lbl = ws.cell(row=row_num, column=9, 
value=row_label) - lbl.font = f_data; lbl.border = bl + lbl.font = f_data; lbl.border = tbl_bot_left if is_last else tbl_mid_left lbl.alignment = Alignment(horizontal='left', vertical='center') n = len(period_vals_list) for i, val in enumerate(period_vals_list): sc = period_start_cols[i] - is_last_col = (i == n - 1) val_str = f"{val} dBA" if val is not None else "" c = ws.cell(row=row_num, column=sc, value=val_str) c.font = f_bold c.alignment = Alignment(horizontal='center', vertical='center') c.border = Border( left=med if i == 0 else thin, - right=med if is_last_col else thin, + right=med if i == n - 1 else thin, top=tbl_bot_mid.top if is_last else tbl_mid_mid.top, bottom=tbl_bot_mid.bottom if is_last else tbl_mid_mid.bottom, ) - ws.merge_cells(start_row=row_num, start_column=sc, end_row=row_num, end_column=sc + 1) + ws.merge_cells(start_row=row_num, start_column=sc, + end_row=row_num, end_column=sc + 1) - write_stat_dynamic(32, "LAmax", + write_stat_dynamic(tbl_data_row, "LAmax", [_max([v[0] for v in rows]) for _, rows in active_periods]) - write_stat_dynamic(33, "LA01 Average", + write_stat_dynamic(tbl_data_row + 1, "LA01 Average", [_avg([v[1] for v in rows]) for _, rows in active_periods]) - write_stat_dynamic(34, "LA10 Average", + write_stat_dynamic(tbl_data_row + 2, "LA10 Average", [_avg([v[2] for v in rows]) for _, rows in active_periods], is_last=True) ws.sheet_properties.pageSetUpPr = PageSetupProperties(fitToPage=False) @@ -3798,7 +3865,7 @@ async def generate_combined_from_preview( } def _build_summary_sheet(wb, day_label, project_name, loc_summaries): - summary_ws = wb.create_sheet(title="Summary", index=0) + summary_ws = wb.create_sheet(title="Summary") summary_ws['A1'] = f"{report_title} - {project_name} - {day_label}" summary_ws['A1'].font = f_title summary_ws.merge_cells('A1:E1') @@ -3867,7 +3934,7 @@ async def generate_combined_from_preview( # Build a clean filename from label or location+date label_clean = session_label or loc_name label_clean = 
"".join(c for c in label_clean if c.isalnum() or c in (' ', '-', '_', '/')).strip().replace(' ', '_').replace('/', '-') - xlsx_name = f"{project_name_clean}_{label_clean}_report.xlsx" + xlsx_name = f"{label_clean}_{project_name_clean}_report.xlsx" zf.writestr(xlsx_name, xlsx_buf.read()) zip_buffer.seek(0) diff --git a/templates/combined_report_preview.html b/templates/combined_report_preview.html index 1439e33..4de3bb6 100644 --- a/templates/combined_report_preview.html +++ b/templates/combined_report_preview.html @@ -187,7 +187,7 @@ document.addEventListener('DOMContentLoaded', function() { const el = document.getElementById('spreadsheet-' + idx); if (!el) return; const opts = Object.assign({}, jssOptions, { data: loc.spreadsheet_data }); - spreadsheets[loc.location_name] = jspreadsheet(el, opts); + spreadsheets[idx] = jspreadsheet(el, opts); }); if (allLocationData.length > 0) { switchTab(0); @@ -228,9 +228,8 @@ function switchTab(idx) { } // Refresh jspreadsheet rendering after showing panel - const loc = allLocationData[idx]; - if (loc && spreadsheets[loc.location_name]) { - try { spreadsheets[loc.location_name].updateTable(); } catch(e) {} + if (spreadsheets[idx]) { + try { spreadsheets[idx].updateTable(); } catch(e) {} } } @@ -241,10 +240,14 @@ async function downloadCombinedReport() { btn.innerHTML = ' Generating ZIP...'; try { - const locations = allLocationData.map(function(loc) { + const locations = allLocationData.map(function(loc, idx) { return { - location_name: loc.location_name, - spreadsheet_data: spreadsheets[loc.location_name] ? spreadsheets[loc.location_name].getData() : loc.spreadsheet_data, + session_id: loc.session_id || '', + session_label: loc.session_label || '', + period_type: loc.period_type || '', + started_at: loc.started_at || '', + location_name: loc.location_name, + spreadsheet_data: spreadsheets[idx] ? 
spreadsheets[idx].getData() : loc.spreadsheet_data, }; }); -- 2.49.1 From e4ef065db83696de9118fa452273042a1e6b1fa1 Mon Sep 17 00:00:00 2001 From: serversdown Date: Sat, 7 Mar 2026 01:39:19 +0000 Subject: [PATCH 30/31] Version bump to v0.7.0. Docs: Update readme/changelog for 0.7.0 --- CHANGELOG.md | 55 +++++++++++++++++++++++++++++++++++++++++++++++++ README.md | 20 ++++++++++++++---- backend/main.py | 2 +- 3 files changed, 72 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7307c17..756074f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,60 @@ All notable changes to Terra-View will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.7.0] - 2026-03-07 + +### Added +- **Project Status Management**: Projects can now be placed `on_hold` or `archived`, with automatic cancellation of pending scheduled actions +- **Hard Delete Projects**: Support for permanently deleting projects, in addition to soft-delete with auto-pruning +- **Vibration Location Detail**: New dedicated template for vibration project location detail views +- **Vibration Project Isolation**: Vibration projects no longer show SLM-specific project tabs +- **Manual SD Card Data Upload**: Upload offline NRL data directly from SD card via ZIP or multi-file select + - Accepts `.rnd`/`.rnh` files; parses `.rnh` metadata for session start/stop times, serial number, and store name + - Creates `MonitoringSession` and `DataFile` records automatically; no unit assignment required + - Upload panel on NRL detail Data Files tab with inline feedback and auto-refresh via HTMX +- **Standalone SLM Type**: New SLM device mode that operates without a modem (direct IP connection) +- **NL32 Data Support**: Report generator and web viewer now support NL32 measurement data format +- **Combined Report Wizard**: Multi-session combined Excel 
report generation tool + - Wizard UI grouped by location with period type badges (day/night) + - Each selected session produces one `.xlsx` in a ZIP archive + - Period type filtering: day sessions keep last calendar date (7AM–6:59PM); night sessions span both days (7PM–6:59AM) +- **Combined Report Preview**: Interactive spreadsheet-style preview before generating combined reports +- **Chart Preview**: Live chart preview in the report generator matching final report styling +- **SLM Model Schemas**: Per-model configuration schemas for NL32, NL43, NL53 devices +- **Data Collection Mode**: Projects now store a data collection mode field with UI controls and migration + +### Changed +- **MonitoringSession rename**: `RecordingSession` renamed to `MonitoringSession` throughout codebase; DB table renamed from `recording_sessions` to `monitoring_sessions` + - Migration: `backend/migrate_rename_recording_to_monitoring_sessions.py` +- **Combined Report Split Logic**: Separate days now generate separate `.xlsx` files; NRLs remain one per sheet +- **Mass Upload Parsing**: Smarter file filtering — no longer imports unneeded Lp files or `.xlsx` files +- **SLM Start Time Grace Period**: 15-minute grace window added so data starting at session start time is included +- **NL32 Date Parsing**: Date now read from `start_time` field instead of file metadata +- **Project Data Labels**: Improved Jinja filters and UI label clarity for project data views + +### Fixed +- **Dev/Prod Separation**: Dev server now uses Docker Compose override; production deployment no longer affected by dev config +- **SLM Modal**: Bench/deploy toggle now correctly shown in SLM unit modal +- **Auto-Downloaded Files**: Files downloaded by scheduler now appear in project file listings +- **Duplicate Download**: Removed duplicate file download that occurred following a scheduled stop +- **SLMM Environment Variables**: `TCP_IDLE_TTL` and `TCP_MAX_AGE` now correctly passed to SLMM service via docker-compose + +### 
Technical Details +- `session_label` and `period_type` stored on `monitoring_sessions` table (migration: `migrate_add_session_period_type.py`) +- `device_model` stored on `monitoring_sessions` table (migration: `migrate_add_session_device_model.py`) +- Upload endpoint: `POST /api/projects/{project_id}/nrl/{location_id}/upload-data` +- ZIP filename format: `{session_label}_{project_name}_report.xlsx` (label first) + +### Migration Notes +Run the following migration scripts once per database before deploying: +```bash +python backend/migrate_rename_recording_to_monitoring_sessions.py +python backend/migrate_add_session_period_type.py +python backend/migrate_add_session_device_model.py +``` + +--- + ## [0.6.1] - 2026-02-16 ### Added @@ -445,6 +499,7 @@ No database migration required for v0.4.0. All new features use existing databas - Photo management per unit - Automated status categorization (OK/Pending/Missing) +[0.7.0]: https://github.com/serversdwn/seismo-fleet-manager/compare/v0.6.1...v0.7.0 [0.6.0]: https://github.com/serversdwn/seismo-fleet-manager/compare/v0.5.1...v0.6.0 [0.5.1]: https://github.com/serversdwn/seismo-fleet-manager/compare/v0.5.0...v0.5.1 [0.5.0]: https://github.com/serversdwn/seismo-fleet-manager/compare/v0.4.4...v0.5.0 diff --git a/README.md b/README.md index c32e9dd..3ea2995 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Terra-View v0.6.1 +# Terra-View v0.7.0 Backend API and HTMX-powered web interface for managing a mixed fleet of seismographs and field modems. Track deployments, monitor health in real time, merge roster intent with incoming telemetry, and control your fleet through a unified database and dashboard. 
## Features @@ -496,6 +496,16 @@ docker compose down -v ## Release Highlights +### v0.7.0 — 2026-03-07 +- **Project Status Management**: On-hold and archived project states with automatic cancellation of pending actions +- **Manual SD Card Upload**: Upload offline NRL/SLM data directly from SD card (ZIP or multi-file); auto-creates monitoring sessions from `.rnh` metadata +- **Combined Report Wizard**: Multi-session Excel report generation with location grouping, period type filtering, and ZIP download +- **NL32 Support**: Report generator and web viewer now handle NL32 measurement data +- **Chart Preview**: Live chart preview in the report generator matching final output styling +- **Standalone SLM Mode**: SLMs can now be configured without a paired modem (direct IP) +- **Vibration Project Isolation**: Vibration project views no longer show SLM-specific tabs +- **MonitoringSession Rename**: `RecordingSession` renamed to `MonitoringSession` throughout; run migration before deploying + ### v0.6.1 — 2026-02-16 - **One-Off Recording Schedules**: Schedule single recordings with specific start/end datetimes - **Bidirectional Pairing Sync**: Device-modem pairing now updates both sides automatically @@ -584,11 +594,13 @@ MIT ## Version -**Current: 0.6.1** — One-off recording schedules, bidirectional pairing sync, scheduler timezone fix (2026-02-16) +**Current: 0.7.0** — Project status management, manual SD card upload, combined report wizard, NL32 support, MonitoringSession rename (2026-03-07) -Previous: 0.6.0 — Calendar & reservation mode, device pairing interface, calibration UX overhaul, modem dashboard enhancements (2026-02-06) +Previous: 0.6.1 — One-off recording schedules, bidirectional pairing sync, scheduler timezone fix (2026-02-16) -Previous: 0.5.1 — Dashboard schedule view with today's actions panel, new Terra-View branding and logo rework (2026-01-27) +0.6.0 — Calendar & reservation mode, device pairing interface, calibration UX overhaul, modem dashboard 
enhancements (2026-02-06) + +0.5.1 — Dashboard schedule view with today's actions panel, new Terra-View branding and logo rework (2026-01-27) 0.4.4 — Recurring schedules, alerting UI, report templates + RND viewer, and SLM workflow polish (2026-01-23) diff --git a/backend/main.py b/backend/main.py index d5a2f8c..fe95f59 100644 --- a/backend/main.py +++ b/backend/main.py @@ -30,7 +30,7 @@ Base.metadata.create_all(bind=engine) ENVIRONMENT = os.getenv("ENVIRONMENT", "production") # Initialize FastAPI app -VERSION = "0.6.1" +VERSION = "0.7.0" if ENVIRONMENT == "development": _build = os.getenv("BUILD_NUMBER", "0") if _build and _build != "0": -- 2.49.1 From e89a04f58c8b0d2af8fd06f9ce09f4ea5a4bed41 Mon Sep 17 00:00:00 2001 From: serversdown Date: Sat, 7 Mar 2026 07:16:10 +0000 Subject: [PATCH 31/31] fix: SLM report line graph border added, combined report wizard spacing fix. --- backend/routers/projects.py | 20 ++++++++++++++++++++ templates/combined_report_wizard.html | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/backend/routers/projects.py b/backend/routers/projects.py index 5349788..29e71cd 100644 --- a/backend/routers/projects.py +++ b/backend/routers/projects.py @@ -2038,6 +2038,7 @@ async def generate_excel_report( import openpyxl from openpyxl.chart import LineChart, Reference from openpyxl.chart.label import DataLabelList + from openpyxl.chart.shapes import GraphicalProperties from openpyxl.styles import Font, Alignment, Border, Side, PatternFill from openpyxl.utils import get_column_letter except ImportError: @@ -2340,6 +2341,10 @@ async def generate_excel_report( chart.series[2].graphicalProperties.line.solidFill = "0070C0" chart.series[2].graphicalProperties.line.width = 19050 + _plot_border = GraphicalProperties() + _plot_border.ln.solidFill = "000000" + _plot_border.ln.w = 12700 + chart.plot_area.spPr = _plot_border ws.add_chart(chart, "H4") # --- Stats table: note at I28-I29, headers at I31, data rows 32-34 --- @@ -2679,6 +2684,7 @@ 
async def generate_report_from_preview( try: import openpyxl from openpyxl.chart import LineChart, Reference + from openpyxl.chart.shapes import GraphicalProperties from openpyxl.styles import Font, Alignment, Border, Side, PatternFill from openpyxl.utils import get_column_letter except ImportError: @@ -2821,6 +2827,10 @@ async def generate_report_from_preview( chart.series[1].graphicalProperties.line.width = 19050 chart.series[2].graphicalProperties.line.solidFill = "0070C0" chart.series[2].graphicalProperties.line.width = 19050 + _plot_border = GraphicalProperties() + _plot_border.ln.solidFill = "000000" + _plot_border.ln.w = 12700 + chart.plot_area.spPr = _plot_border ws.add_chart(chart, "H4") # --- Stats block starting at I28 --- @@ -2957,6 +2967,7 @@ async def generate_combined_excel_report( try: import openpyxl from openpyxl.chart import LineChart, Reference + from openpyxl.chart.shapes import GraphicalProperties from openpyxl.styles import Font, Alignment, Border, Side, PatternFill from openpyxl.utils import get_column_letter except ImportError: @@ -3163,6 +3174,10 @@ async def generate_combined_excel_report( chart.series[2].graphicalProperties.line.solidFill = "0070C0" chart.series[2].graphicalProperties.line.width = 19050 + _plot_border = GraphicalProperties() + _plot_border.ln.solidFill = "000000" + _plot_border.ln.w = 12700 + chart.plot_area.spPr = _plot_border ws.add_chart(chart, "H4") # Stats table: note at I28-I29, headers at I31, data rows 32-34, border row 35 @@ -3569,6 +3584,7 @@ async def generate_combined_from_preview( try: import openpyxl from openpyxl.chart import LineChart, Reference + from openpyxl.chart.shapes import GraphicalProperties from openpyxl.styles import Font, Alignment, Border, Side, PatternFill from openpyxl.utils import get_column_letter from openpyxl.worksheet.properties import PageSetupProperties @@ -3736,6 +3752,10 @@ async def generate_combined_from_preview( chart.series[2].graphicalProperties.line.solidFill = "0070C0" 
chart.series[2].graphicalProperties.line.width = 19050 + _plot_border = GraphicalProperties() + _plot_border.ln.solidFill = "000000" + _plot_border.ln.w = 12700 + chart.plot_area.spPr = _plot_border ws.add_chart(chart, "H4") hdr_fill_tbl = PatternFill(start_color="F2F2F2", end_color="F2F2F2", fill_type="solid") diff --git a/templates/combined_report_wizard.html b/templates/combined_report_wizard.html index a6c213f..264f185 100644 --- a/templates/combined_report_wizard.html +++ b/templates/combined_report_wizard.html @@ -75,7 +75,7 @@
-
+

Monitoring Sessions

-- 2.49.1