feat: Add report templates API for CRUD operations and implement SLM settings modal
- Implemented a new API router for managing report templates, including endpoints for listing, creating, retrieving, updating, and deleting templates.
- Added a new HTML partial for a unified SLM settings modal, allowing users to configure SLM settings with dynamic modem selection and FTP credentials.
- Created a report preview page with an editable data table using jspreadsheet, enabling users to modify report details and download the report as an Excel file.
This commit is contained in:
@@ -1343,6 +1343,12 @@ async def generate_excel_report(
|
||||
file_id: str,
|
||||
report_title: str = Query("Background Noise Study", description="Title for the report"),
|
||||
location_name: str = Query("", description="Location name (e.g., 'NRL 1 - West Side')"),
|
||||
project_name: str = Query("", description="Project name override"),
|
||||
client_name: str = Query("", description="Client name for report header"),
|
||||
start_time: str = Query("", description="Filter start time (HH:MM format, e.g., '19:00')"),
|
||||
end_time: str = Query("", description="Filter end time (HH:MM format, e.g., '07:00')"),
|
||||
start_date: str = Query("", description="Filter start date (YYYY-MM-DD format)"),
|
||||
end_date: str = Query("", description="Filter end date (YYYY-MM-DD format)"),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
@@ -1354,6 +1360,10 @@ async def generate_excel_report(
|
||||
- Line chart visualization
|
||||
- Time period summary statistics
|
||||
|
||||
Time filtering:
|
||||
- start_time/end_time: Filter to time window (handles overnight like 19:00-07:00)
|
||||
- start_date/end_date: Filter to date range
|
||||
|
||||
Column mapping from RND to Report:
|
||||
- Lmax(Main) -> LAmax (dBA)
|
||||
- LN1(Main) -> LA01 (dBA) [L1 percentile]
|
||||
@@ -1432,6 +1442,99 @@ async def generate_excel_report(
|
||||
logger.error(f"Error reading RND file: {e}")
|
||||
raise HTTPException(status_code=500, detail=f"Error reading file: {str(e)}")
|
||||
|
||||
# Apply time and date filtering
|
||||
def filter_rows_by_time(rows, filter_start_time, filter_end_time, filter_start_date, filter_end_date):
    """Filter rows by time-of-day window and/or calendar date range.

    Args:
        rows: list of dicts parsed from the RND CSV; each row's 'Start Time'
            is expected in '%Y/%m/%d %H:%M:%S' format.
        filter_start_time / filter_end_time: 'HH:MM' (or 'HH') strings; the
            window is applied only when BOTH parse successfully.  Overnight
            windows (start > end, e.g. 19:00-07:00) wrap past midnight.
        filter_start_date / filter_end_date: 'YYYY-MM-DD' strings; each bound
            is applied independently when it parses.

    Returns:
        The original list unchanged when no filter is given; otherwise a new
        list.  Rows with an empty 'Start Time' are dropped; rows whose
        'Start Time' cannot be parsed are kept (we'd rather show extra data
        than silently lose it).

    Bug fix: previously a token like '19:xx' assigned the hour before the
    minute parse failed, leaving hour set but minute None and crashing with
    TypeError in the minutes arithmetic below.  Each token is now parsed
    all-or-nothing.
    """
    if not (filter_start_time or filter_end_time or filter_start_date or filter_end_date):
        return rows

    def _to_minutes(token):
        # 'HH' or 'HH:MM' -> minutes past midnight; None when absent/invalid.
        if not token:
            return None
        try:
            pieces = token.split(':')
            hour = int(pieces[0])
            minute = int(pieces[1]) if len(pieces) > 1 else 0
            return hour * 60 + minute
        except (ValueError, IndexError):
            return None

    def _to_date(token):
        # 'YYYY-MM-DD' -> date; None when absent/invalid.
        if not token:
            return None
        try:
            return datetime.strptime(token, '%Y-%m-%d').date()
        except ValueError:
            return None

    window_start = _to_minutes(filter_start_time)
    window_end = _to_minutes(filter_end_time)
    date_lo = _to_date(filter_start_date)
    date_hi = _to_date(filter_end_date)

    filtered = []
    for row in rows:
        start_time_str = row.get('Start Time', '')
        if not start_time_str:
            continue

        try:
            dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S')
        except ValueError:
            # If we can't parse the time, include the row anyway
            filtered.append(row)
            continue

        # Date filtering (each bound independent)
        if date_lo and dt.date() < date_lo:
            continue
        if date_hi and dt.date() > date_hi:
            continue

        # Time filtering — only when both ends of the window are valid
        if window_start is not None and window_end is not None:
            row_minutes = dt.hour * 60 + dt.minute
            if window_start > window_end:
                # Overnight range (e.g. 19:00-07:00)
                inside = row_minutes >= window_start or row_minutes < window_end
            else:
                # Same-day range (e.g. 07:00-19:00); end is exclusive
                inside = window_start <= row_minutes < window_end
            if not inside:
                continue

        filtered.append(row)

    return filtered
|
||||
|
||||
# Apply filters
|
||||
original_count = len(rnd_rows)
|
||||
rnd_rows = filter_rows_by_time(rnd_rows, start_time, end_time, start_date, end_date)
|
||||
|
||||
if not rnd_rows:
|
||||
time_filter_desc = ""
|
||||
if start_time and end_time:
|
||||
time_filter_desc = f" between {start_time} and {end_time}"
|
||||
if start_date or end_date:
|
||||
time_filter_desc += f" from {start_date or 'start'} to {end_date or 'end'}"
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"No data found after applying filters{time_filter_desc}. Original file had {original_count} rows."
|
||||
)
|
||||
|
||||
# Create Excel workbook
|
||||
wb = openpyxl.Workbook()
|
||||
ws = wb.active
|
||||
@@ -1449,13 +1552,19 @@ async def generate_excel_report(
|
||||
header_fill = PatternFill(start_color="DAEEF3", end_color="DAEEF3", fill_type="solid")
|
||||
|
||||
# Row 1: Report title
|
||||
final_project_name = project_name if project_name else (project.name if project else "")
|
||||
final_title = report_title
|
||||
if project:
|
||||
final_title = f"{report_title} - {project.name}"
|
||||
if final_project_name:
|
||||
final_title = f"{report_title} - {final_project_name}"
|
||||
ws['A1'] = final_title
|
||||
ws['A1'].font = title_font
|
||||
ws.merge_cells('A1:G1')
|
||||
|
||||
# Row 2: Client name (if provided)
|
||||
if client_name:
|
||||
ws['A2'] = f"Client: {client_name}"
|
||||
ws['A2'].font = Font(italic=True, size=10)
|
||||
|
||||
# Row 3: Location name
|
||||
final_location = location_name
|
||||
if not final_location and location:
|
||||
@@ -1464,6 +1573,15 @@ async def generate_excel_report(
|
||||
ws['A3'] = final_location
|
||||
ws['A3'].font = Font(bold=True, size=11)
|
||||
|
||||
# Row 4: Time filter info (if applied)
|
||||
if start_time and end_time:
|
||||
filter_info = f"Time Filter: {start_time} - {end_time}"
|
||||
if start_date or end_date:
|
||||
filter_info += f" | Date Range: {start_date or 'start'} to {end_date or 'end'}"
|
||||
filter_info += f" | {len(rnd_rows)} of {original_count} rows"
|
||||
ws['A4'] = filter_info
|
||||
ws['A4'].font = Font(italic=True, size=9, color="666666")
|
||||
|
||||
# Row 7: Headers
|
||||
headers = ['Test Increment #', 'Date', 'Time', 'LAmax (dBA)', 'LA01 (dBA)', 'LA10 (dBA)', 'Comments']
|
||||
for col, header in enumerate(headers, 1):
|
||||
@@ -1650,6 +1768,364 @@ async def generate_excel_report(
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{project_id}/files/{file_id}/preview-report")
async def preview_report_data(
    request: Request,
    project_id: str,
    file_id: str,
    report_title: str = Query("Background Noise Study", description="Title for the report"),
    location_name: str = Query("", description="Location name"),
    project_name: str = Query("", description="Project name override"),
    client_name: str = Query("", description="Client name"),
    start_time: str = Query("", description="Filter start time (HH:MM format)"),
    end_time: str = Query("", description="Filter end time (HH:MM format)"),
    start_date: str = Query("", description="Filter start date (YYYY-MM-DD format)"),
    end_date: str = Query("", description="Filter end date (YYYY-MM-DD format)"),
    db: Session = Depends(get_db),
):
    """
    Preview report data for editing in jspreadsheet.

    Loads the Leq RND file, applies the optional time/date filters, converts
    the rows to a jspreadsheet-compatible array-of-arrays, and renders the
    report_preview.html editor page.

    Raises:
        HTTPException 404: file record missing or file absent on disk.
        HTTPException 403: file does not belong to this project.
        HTTPException 400: not a Leq file, or file contains no data rows.
        HTTPException 500: unexpected error while reading/parsing the file.
    """
    from backend.models import DataFile, ReportTemplate
    from pathlib import Path
    import csv

    # Get the file record
    file_record = db.query(DataFile).filter_by(id=file_id).first()
    if not file_record:
        raise HTTPException(status_code=404, detail="File not found")

    # Verify file belongs to this project
    session = db.query(RecordingSession).filter_by(id=file_record.session_id).first()
    if not session or session.project_id != project_id:
        raise HTTPException(status_code=403, detail="File does not belong to this project")

    # Related context for the report header fields
    project = db.query(Project).filter_by(id=project_id).first()
    location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None

    # Build full file path
    file_path = Path("data") / file_record.file_path
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File not found on disk")

    # Reports are only meaningful for 15-minute averaged (Leq) exports
    if '_Leq_' not in file_record.file_path:
        raise HTTPException(
            status_code=400,
            detail="Reports can only be generated from Leq files (15-minute averaged data)."
        )

    # Read and parse the Leq RND file
    try:
        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
            content = f.read()

        reader = csv.DictReader(io.StringIO(content))
        rnd_rows = []
        for row in reader:
            cleaned_row = {}
            for key, value in row.items():
                if key:
                    cleaned_key = key.strip()
                    cleaned_value = value.strip() if value else ''
                    if cleaned_value and cleaned_value not in ['-.-', '-', '']:
                        try:
                            # Numeric columns become floats; labels stay strings
                            cleaned_value = float(cleaned_value)
                        except ValueError:
                            pass
                    elif cleaned_value in ['-.-', '-']:
                        # Meter placeholder for "no reading"
                        cleaned_value = None
                    cleaned_row[cleaned_key] = cleaned_value
            rnd_rows.append(cleaned_row)

        if not rnd_rows:
            raise HTTPException(status_code=400, detail="No data found in RND file")

    except HTTPException:
        # Bug fix: don't let the generic handler below re-wrap our 400 as a 500
        raise
    except Exception as e:
        logger.error(f"Error reading RND file: {e}")
        raise HTTPException(status_code=500, detail=f"Error reading file: {str(e)}")

    # Apply time and date filtering (same logic as generate-report)
    def filter_rows(rows, filter_start_time, filter_end_time, filter_start_date, filter_end_date):
        if not filter_start_time and not filter_end_time and not filter_start_date and not filter_end_date:
            return rows

        filtered = []
        start_hour = start_minute = end_hour = end_minute = None

        if filter_start_time:
            try:
                parts = filter_start_time.split(':')
                start_hour = int(parts[0])
                start_minute = int(parts[1]) if len(parts) > 1 else 0
            except (ValueError, IndexError):
                # Bug fix: reset BOTH so a half-parsed token ('19:xx') can't
                # crash the minutes arithmetic below
                start_hour = start_minute = None

        if filter_end_time:
            try:
                parts = filter_end_time.split(':')
                end_hour = int(parts[0])
                end_minute = int(parts[1]) if len(parts) > 1 else 0
            except (ValueError, IndexError):
                end_hour = end_minute = None

        start_dt = end_dt = None
        if filter_start_date:
            try:
                start_dt = datetime.strptime(filter_start_date, '%Y-%m-%d').date()
            except ValueError:
                pass
        if filter_end_date:
            try:
                end_dt = datetime.strptime(filter_end_date, '%Y-%m-%d').date()
            except ValueError:
                pass

        for row in rows:
            start_time_str = row.get('Start Time', '')
            if not start_time_str:
                continue

            try:
                dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S')
                row_date = dt.date()
                row_hour = dt.hour
                row_minute = dt.minute

                if start_dt and row_date < start_dt:
                    continue
                if end_dt and row_date > end_dt:
                    continue

                if start_hour is not None and end_hour is not None:
                    row_time_minutes = row_hour * 60 + row_minute
                    start_time_minutes = start_hour * 60 + start_minute
                    end_time_minutes = end_hour * 60 + end_minute

                    if start_time_minutes > end_time_minutes:
                        # Overnight range (e.g. 19:00-07:00)
                        if not (row_time_minutes >= start_time_minutes or row_time_minutes < end_time_minutes):
                            continue
                    else:
                        # Same-day range (e.g. 07:00-19:00)
                        if not (start_time_minutes <= row_time_minutes < end_time_minutes):
                            continue

                filtered.append(row)
            except ValueError:
                # Unparseable timestamps are kept rather than silently dropped
                filtered.append(row)

        return filtered

    original_count = len(rnd_rows)
    rnd_rows = filter_rows(rnd_rows, start_time, end_time, start_date, end_date)

    # Convert to spreadsheet data format (array of arrays) matching the
    # report columns: Test #, Date, Time, LAmax, LA01, LA10, Comments
    spreadsheet_data = []
    for idx, row in enumerate(rnd_rows, 1):
        start_time_str = row.get('Start Time', '')
        date_str = ''
        time_str = ''
        if start_time_str:
            try:
                dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S')
                date_str = dt.strftime('%Y-%m-%d')
                time_str = dt.strftime('%H:%M:%S')
            except ValueError:
                # Fall back to showing the raw value in the Date column
                date_str = start_time_str
                time_str = ''

        lmax = row.get('Lmax(Main)', '')
        ln1 = row.get('LN1(Main)', '')
        ln2 = row.get('LN2(Main)', '')

        spreadsheet_data.append([
            idx,  # Test #
            date_str,
            time_str,
            lmax if lmax else '',
            ln1 if ln1 else '',
            ln2 if ln2 else '',
            ''  # Comments (user-editable in the preview)
        ])

    # Prepare context data, preferring explicit overrides from query params
    final_project_name = project_name if project_name else (project.name if project else "")
    final_location = location_name if location_name else (location.name if location else "")

    # Report templates for the template-selection dropdown.
    # Bug fix: this was assigned to `templates`, shadowing the module-level
    # Jinja2Templates instance and making the TemplateResponse call below
    # blow up with AttributeError on a plain list.
    report_templates = db.query(ReportTemplate).all()

    return templates.TemplateResponse("report_preview.html", {
        "request": request,
        "project_id": project_id,
        "file_id": file_id,
        "project": project,
        "location": location,
        "file": file_record,
        "spreadsheet_data": spreadsheet_data,
        "report_title": report_title,
        "project_name": final_project_name,
        "client_name": client_name,
        "location_name": final_location,
        "start_time": start_time,
        "end_time": end_time,
        "start_date": start_date,
        "end_date": end_date,
        "original_count": original_count,
        "filtered_count": len(rnd_rows),
        "templates": report_templates,
    })
|
||||
|
||||
|
||||
@router.post("/{project_id}/files/{file_id}/generate-from-preview")
async def generate_report_from_preview(
    project_id: str,
    file_id: str,
    data: dict,
    db: Session = Depends(get_db),
):
    """
    Generate an Excel report from edited spreadsheet data.

    Accepts the edited rows from the jspreadsheet preview page plus the
    report header fields, builds the formatted workbook (title/header block,
    bordered data table, line chart over the three level columns) and
    streams it back as an .xlsx download.

    Raises:
        HTTPException 404: file record not found.
        HTTPException 403: file does not belong to this project.
        HTTPException 400: empty spreadsheet payload.
        HTTPException 500: openpyxl is not installed.
    """
    from backend.models import DataFile

    try:
        import openpyxl
        from openpyxl.chart import LineChart, Reference
        from openpyxl.styles import Font, Alignment, Border, Side, PatternFill
        from openpyxl.utils import get_column_letter
    except ImportError:
        raise HTTPException(status_code=500, detail="openpyxl is not installed")

    # The file record is needed only for ownership checks and the filename
    file_record = db.query(DataFile).filter_by(id=file_id).first()
    if not file_record:
        raise HTTPException(status_code=404, detail="File not found")

    session = db.query(RecordingSession).filter_by(id=file_record.session_id).first()
    if not session or session.project_id != project_id:
        raise HTTPException(status_code=403, detail="File does not belong to this project")

    project = db.query(Project).filter_by(id=project_id).first()
    location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None

    # Extract payload fields; header values may have been edited client-side
    spreadsheet_data = data.get('data', [])
    report_title = data.get('report_title', 'Background Noise Study')
    project_name = data.get('project_name', project.name if project else '')
    client_name = data.get('client_name', '')
    location_name = data.get('location_name', location.name if location else '')
    time_filter = data.get('time_filter', '')

    if not spreadsheet_data:
        raise HTTPException(status_code=400, detail="No data provided")

    # Create Excel workbook
    wb = openpyxl.Workbook()
    ws = wb.active
    ws.title = "Sound Level Data"

    # Styles
    title_font = Font(bold=True, size=14)
    header_font = Font(bold=True, size=10)
    thin_border = Border(
        left=Side(style='thin'),
        right=Side(style='thin'),
        top=Side(style='thin'),
        bottom=Side(style='thin')
    )
    header_fill = PatternFill(start_color="DAEEF3", end_color="DAEEF3", fill_type="solid")

    # Row 1: Title
    final_title = f"{report_title} - {project_name}" if project_name else report_title
    ws['A1'] = final_title
    ws['A1'].font = title_font
    ws.merge_cells('A1:G1')

    # Row 2: Client
    if client_name:
        ws['A2'] = f"Client: {client_name}"
        ws['A2'].font = Font(italic=True, size=10)

    # Row 3: Location
    if location_name:
        ws['A3'] = location_name
        ws['A3'].font = Font(bold=True, size=11)

    # Row 4: Time filter info
    if time_filter:
        ws['A4'] = time_filter
        ws['A4'].font = Font(italic=True, size=9, color="666666")

    # Row 7: Headers
    headers = ['Test Increment #', 'Date', 'Time', 'LAmax (dBA)', 'LA01 (dBA)', 'LA10 (dBA)', 'Comments']
    for col, header in enumerate(headers, 1):
        cell = ws.cell(row=7, column=col, value=header)
        cell.font = header_font
        cell.border = thin_border
        cell.fill = header_fill
        cell.alignment = Alignment(horizontal='center')

    # Column widths
    column_widths = [16, 12, 10, 12, 12, 12, 40]
    for i, width in enumerate(column_widths, 1):
        ws.column_dimensions[get_column_letter(i)].width = width

    # Data rows start immediately under the header row
    data_start_row = 8
    for idx, row_data in enumerate(spreadsheet_data):
        data_row = data_start_row + idx
        for col, value in enumerate(row_data, 1):
            # Empty strings become truly blank cells rather than '' text
            cell = ws.cell(row=data_row, column=col, value=value if value != '' else None)
            cell.border = thin_border

    data_end_row = data_start_row + len(spreadsheet_data) - 1

    # Line chart over columns D-F (LAmax, LA01, LA10), X axis = Test #
    if len(spreadsheet_data) > 0:
        chart = LineChart()
        chart.title = f"{location_name or 'Sound Level Data'} - Background Noise Study"
        chart.style = 10
        chart.y_axis.title = "Sound Level (dBA)"
        chart.x_axis.title = "Test Increment"
        chart.height = 12
        chart.width = 20

        # min_row=7 so the header row supplies the series titles
        data_ref = Reference(ws, min_col=4, min_row=7, max_col=6, max_row=data_end_row)
        categories = Reference(ws, min_col=1, min_row=data_start_row, max_row=data_end_row)

        chart.add_data(data_ref, titles_from_data=True)
        chart.set_categories(categories)

        # Fixed series colours: LAmax red, LA01 green, LA10 blue
        if len(chart.series) >= 3:
            chart.series[0].graphicalProperties.line.solidFill = "FF0000"
            chart.series[1].graphicalProperties.line.solidFill = "00B050"
            chart.series[2].graphicalProperties.line.solidFill = "0070C0"

        ws.add_chart(chart, "I3")

    # Save to an in-memory buffer
    output = io.BytesIO()
    wb.save(output)
    output.seek(0)

    # Build a filesystem-safe download name: <location>_<source-file>_report.xlsx
    # Bug fix: the previous code unconditionally overwrote the name it had
    # just built, discarding the location/source-file prefix.
    base_name = file_record.file_path.split('/')[-1].replace('.rnd', '')
    if location:
        base_name = f"{location.name}_{base_name}"
    filename = f"{base_name}_report.xlsx"
    filename = "".join(c for c in filename if c.isalnum() or c in ('_', '-', '.')).rstrip()

    return StreamingResponse(
        output,
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": f'attachment; filename="{filename}"'}
    )
|
||||
|
||||
|
||||
@router.get("/{project_id}/generate-combined-report")
|
||||
async def generate_combined_excel_report(
|
||||
project_id: str,
|
||||
|
||||
Reference in New Issue
Block a user