feat: Add report templates API for CRUD operations and implement SLM settings modal
- Implemented a new API router for managing report templates, including endpoints for listing, creating, retrieving, updating, and deleting templates. - Added a new HTML partial for a unified SLM settings modal, allowing users to configure SLM settings with dynamic modem selection and FTP credentials. - Created a report preview page with an editable data table using jspreadsheet, enabling users to modify report details and download the report as an Excel file.
This commit is contained in:
@@ -101,6 +101,10 @@ app.include_router(projects.router)
|
||||
app.include_router(project_locations.router)
|
||||
app.include_router(scheduler.router)
|
||||
|
||||
# Report templates router
|
||||
from backend.routers import report_templates
|
||||
app.include_router(report_templates.router)
|
||||
|
||||
# Start scheduler service on application startup
|
||||
from backend.services.scheduler import start_scheduler, stop_scheduler
|
||||
|
||||
|
||||
88
backend/migrate_add_report_templates.py
Normal file
88
backend/migrate_add_report_templates.py
Normal file
"""
Migration script to add report_templates table.

This creates a new table for storing report generation configurations:
- Template name and project association
- Time filtering settings (start/end time)
- Date range filtering (optional)
- Report title defaults

Run this script once to migrate an existing database.
"""

import os
import sqlite3
import uuid

# Path to the application's SQLite database (relative to the repo root).
DB_PATH = "./data/seismo_fleet.db"


def migrate_database():
    """Create the report_templates table and seed the default templates.

    Idempotent: if the table already exists the migration is skipped.
    If the database file does not exist yet, nothing is done — the app
    creates the schema itself on first run.
    """

    if not os.path.exists(DB_PATH):
        print(f"Database not found at {DB_PATH}")
        print("The database will be created automatically when you run the application.")
        return

    print(f"Migrating database: {DB_PATH}")

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # Check if report_templates table already exists so re-running is a no-op.
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='report_templates'")
    table_exists = cursor.fetchone()

    if table_exists:
        print("Migration already applied - report_templates table exists")
        conn.close()
        return

    print("Creating report_templates table...")

    try:
        cursor.execute("""
            CREATE TABLE report_templates (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                project_id TEXT,
                report_title TEXT DEFAULT 'Background Noise Study',
                start_time TEXT,
                end_time TEXT,
                start_date TEXT,
                end_date TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        print("  ✓ Created report_templates table")

        # Seed three global presets (project_id=NULL means "all projects").
        # Column order matches the INSERT below:
        # (id, name, project_id, report_title, start_time, end_time, start_date, end_date)
        default_templates = [
            (str(uuid.uuid4()), "Nighttime (7PM-7AM)", None, "Background Noise Study", "19:00", "07:00", None, None),
            (str(uuid.uuid4()), "Daytime (7AM-7PM)", None, "Background Noise Study", "07:00", "19:00", None, None),
            (str(uuid.uuid4()), "Full Day (All Data)", None, "Background Noise Study", None, None, None, None),
        ]

        cursor.executemany("""
            INSERT INTO report_templates (id, name, project_id, report_title, start_time, end_time, start_date, end_date)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
        """, default_templates)
        print("  ✓ Inserted default templates (Nighttime, Daytime, Full Day)")

        conn.commit()
        print("\nMigration completed successfully!")

    except sqlite3.Error as e:
        # Roll back the partial migration so the DB stays consistent.
        print(f"\nError during migration: {e}")
        conn.rollback()
        raise

    finally:
        conn.close()


if __name__ == "__main__":
    migrate_database()
||||
@@ -278,3 +278,25 @@ class DataFile(Base):
|
||||
file_metadata = Column(Text, nullable=True) # JSON
|
||||
|
||||
created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
class ReportTemplate(Base):
    """
    Report templates: saved configurations for generating Excel reports.
    Allows users to save time filter presets, titles, etc. for reuse.

    Times and dates are stored as plain strings (not SQL time/date types);
    parsing happens in the report-generation endpoints.
    """
    __tablename__ = "report_templates"

    id = Column(String, primary_key=True, index=True)  # UUID
    name = Column(String, nullable=False)  # e.g. "Nighttime Report", "Full Day Report"
    project_id = Column(String, nullable=True)  # Optional: project-specific template; None = global

    # Template settings
    report_title = Column(String, default="Background Noise Study")
    start_time = Column(String, nullable=True)  # "19:00" format (HH:MM)
    end_time = Column(String, nullable=True)  # "07:00" format (HH:MM); may wrap past midnight
    start_date = Column(String, nullable=True)  # "2025-01-15" format (optional)
    end_date = Column(String, nullable=True)  # "2025-01-20" format (optional)

    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
||||
@@ -1343,6 +1343,12 @@ async def generate_excel_report(
|
||||
file_id: str,
|
||||
report_title: str = Query("Background Noise Study", description="Title for the report"),
|
||||
location_name: str = Query("", description="Location name (e.g., 'NRL 1 - West Side')"),
|
||||
project_name: str = Query("", description="Project name override"),
|
||||
client_name: str = Query("", description="Client name for report header"),
|
||||
start_time: str = Query("", description="Filter start time (HH:MM format, e.g., '19:00')"),
|
||||
end_time: str = Query("", description="Filter end time (HH:MM format, e.g., '07:00')"),
|
||||
start_date: str = Query("", description="Filter start date (YYYY-MM-DD format)"),
|
||||
end_date: str = Query("", description="Filter end date (YYYY-MM-DD format)"),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
@@ -1354,6 +1360,10 @@ async def generate_excel_report(
|
||||
- Line chart visualization
|
||||
- Time period summary statistics
|
||||
|
||||
Time filtering:
|
||||
- start_time/end_time: Filter to time window (handles overnight like 19:00-07:00)
|
||||
- start_date/end_date: Filter to date range
|
||||
|
||||
Column mapping from RND to Report:
|
||||
- Lmax(Main) -> LAmax (dBA)
|
||||
- LN1(Main) -> LA01 (dBA) [L1 percentile]
|
||||
@@ -1432,6 +1442,99 @@ async def generate_excel_report(
|
||||
logger.error(f"Error reading RND file: {e}")
|
||||
raise HTTPException(status_code=500, detail=f"Error reading file: {str(e)}")
|
||||
|
||||
# Apply time and date filtering
|
||||
def filter_rows_by_time(rows, filter_start_time, filter_end_time, filter_start_date, filter_end_date):
|
||||
"""Filter rows by time window and date range."""
|
||||
if not filter_start_time and not filter_end_time and not filter_start_date and not filter_end_date:
|
||||
return rows
|
||||
|
||||
filtered = []
|
||||
|
||||
# Parse time filters
|
||||
start_hour = start_minute = end_hour = end_minute = None
|
||||
if filter_start_time:
|
||||
try:
|
||||
parts = filter_start_time.split(':')
|
||||
start_hour = int(parts[0])
|
||||
start_minute = int(parts[1]) if len(parts) > 1 else 0
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
|
||||
if filter_end_time:
|
||||
try:
|
||||
parts = filter_end_time.split(':')
|
||||
end_hour = int(parts[0])
|
||||
end_minute = int(parts[1]) if len(parts) > 1 else 0
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
|
||||
# Parse date filters
|
||||
start_dt = end_dt = None
|
||||
if filter_start_date:
|
||||
try:
|
||||
start_dt = datetime.strptime(filter_start_date, '%Y-%m-%d').date()
|
||||
except ValueError:
|
||||
pass
|
||||
if filter_end_date:
|
||||
try:
|
||||
end_dt = datetime.strptime(filter_end_date, '%Y-%m-%d').date()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
for row in rows:
|
||||
start_time_str = row.get('Start Time', '')
|
||||
if not start_time_str:
|
||||
continue
|
||||
|
||||
try:
|
||||
dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S')
|
||||
row_date = dt.date()
|
||||
row_hour = dt.hour
|
||||
row_minute = dt.minute
|
||||
|
||||
# Date filtering
|
||||
if start_dt and row_date < start_dt:
|
||||
continue
|
||||
if end_dt and row_date > end_dt:
|
||||
continue
|
||||
|
||||
# Time filtering (handle overnight ranges like 19:00-07:00)
|
||||
if start_hour is not None and end_hour is not None:
|
||||
row_time_minutes = row_hour * 60 + row_minute
|
||||
start_time_minutes = start_hour * 60 + start_minute
|
||||
end_time_minutes = end_hour * 60 + end_minute
|
||||
|
||||
if start_time_minutes > end_time_minutes:
|
||||
# Overnight range (e.g., 19:00-07:00)
|
||||
if not (row_time_minutes >= start_time_minutes or row_time_minutes < end_time_minutes):
|
||||
continue
|
||||
else:
|
||||
# Same day range (e.g., 07:00-19:00)
|
||||
if not (start_time_minutes <= row_time_minutes < end_time_minutes):
|
||||
continue
|
||||
|
||||
filtered.append(row)
|
||||
except ValueError:
|
||||
# If we can't parse the time, include the row anyway
|
||||
filtered.append(row)
|
||||
|
||||
return filtered
|
||||
|
||||
# Apply filters
|
||||
original_count = len(rnd_rows)
|
||||
rnd_rows = filter_rows_by_time(rnd_rows, start_time, end_time, start_date, end_date)
|
||||
|
||||
if not rnd_rows:
|
||||
time_filter_desc = ""
|
||||
if start_time and end_time:
|
||||
time_filter_desc = f" between {start_time} and {end_time}"
|
||||
if start_date or end_date:
|
||||
time_filter_desc += f" from {start_date or 'start'} to {end_date or 'end'}"
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"No data found after applying filters{time_filter_desc}. Original file had {original_count} rows."
|
||||
)
|
||||
|
||||
# Create Excel workbook
|
||||
wb = openpyxl.Workbook()
|
||||
ws = wb.active
|
||||
@@ -1449,13 +1552,19 @@ async def generate_excel_report(
|
||||
header_fill = PatternFill(start_color="DAEEF3", end_color="DAEEF3", fill_type="solid")
|
||||
|
||||
# Row 1: Report title
|
||||
final_project_name = project_name if project_name else (project.name if project else "")
|
||||
final_title = report_title
|
||||
if project:
|
||||
final_title = f"{report_title} - {project.name}"
|
||||
if final_project_name:
|
||||
final_title = f"{report_title} - {final_project_name}"
|
||||
ws['A1'] = final_title
|
||||
ws['A1'].font = title_font
|
||||
ws.merge_cells('A1:G1')
|
||||
|
||||
# Row 2: Client name (if provided)
|
||||
if client_name:
|
||||
ws['A2'] = f"Client: {client_name}"
|
||||
ws['A2'].font = Font(italic=True, size=10)
|
||||
|
||||
# Row 3: Location name
|
||||
final_location = location_name
|
||||
if not final_location and location:
|
||||
@@ -1464,6 +1573,15 @@ async def generate_excel_report(
|
||||
ws['A3'] = final_location
|
||||
ws['A3'].font = Font(bold=True, size=11)
|
||||
|
||||
# Row 4: Time filter info (if applied)
|
||||
if start_time and end_time:
|
||||
filter_info = f"Time Filter: {start_time} - {end_time}"
|
||||
if start_date or end_date:
|
||||
filter_info += f" | Date Range: {start_date or 'start'} to {end_date or 'end'}"
|
||||
filter_info += f" | {len(rnd_rows)} of {original_count} rows"
|
||||
ws['A4'] = filter_info
|
||||
ws['A4'].font = Font(italic=True, size=9, color="666666")
|
||||
|
||||
# Row 7: Headers
|
||||
headers = ['Test Increment #', 'Date', 'Time', 'LAmax (dBA)', 'LA01 (dBA)', 'LA10 (dBA)', 'Comments']
|
||||
for col, header in enumerate(headers, 1):
|
||||
@@ -1650,6 +1768,364 @@ async def generate_excel_report(
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{project_id}/files/{file_id}/preview-report")
async def preview_report_data(
    request: Request,
    project_id: str,
    file_id: str,
    report_title: str = Query("Background Noise Study", description="Title for the report"),
    location_name: str = Query("", description="Location name"),
    project_name: str = Query("", description="Project name override"),
    client_name: str = Query("", description="Client name"),
    start_time: str = Query("", description="Filter start time (HH:MM format)"),
    end_time: str = Query("", description="Filter end time (HH:MM format)"),
    start_date: str = Query("", description="Filter start date (YYYY-MM-DD format)"),
    end_date: str = Query("", description="Filter end date (YYYY-MM-DD format)"),
    db: Session = Depends(get_db),
):
    """
    Preview report data for editing in jspreadsheet.
    Returns an HTML page with the spreadsheet editor.

    Raises:
        HTTPException 404: file record missing or file absent on disk.
        HTTPException 403: file belongs to a different project.
        HTTPException 400: not a Leq file, or the file parsed to zero rows.
        HTTPException 500: unexpected I/O or parse failure.
    """
    from backend.models import DataFile, ReportTemplate
    from pathlib import Path
    import csv

    # Get the file record
    file_record = db.query(DataFile).filter_by(id=file_id).first()
    if not file_record:
        raise HTTPException(status_code=404, detail="File not found")

    # Verify file belongs to this project
    session = db.query(RecordingSession).filter_by(id=file_record.session_id).first()
    if not session or session.project_id != project_id:
        raise HTTPException(status_code=403, detail="File does not belong to this project")

    # Get related data for report context
    project = db.query(Project).filter_by(id=project_id).first()
    location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None

    # Build full file path
    file_path = Path("data") / file_record.file_path
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File not found on disk")

    # Validate this is a Leq file (reports only make sense for averaged data)
    if '_Leq_' not in file_record.file_path:
        raise HTTPException(
            status_code=400,
            detail="Reports can only be generated from Leq files (15-minute averaged data)."
        )

    # Read and parse the Leq RND file (CSV with possibly messy values:
    # '-.-' and '-' are sentinel values meaning "no reading").
    try:
        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
            content = f.read()

        reader = csv.DictReader(io.StringIO(content))
        rnd_rows = []
        for row in reader:
            cleaned_row = {}
            for key, value in row.items():
                if key:
                    cleaned_key = key.strip()
                    cleaned_value = value.strip() if value else ''
                    if cleaned_value and cleaned_value not in ['-.-', '-', '']:
                        try:
                            cleaned_value = float(cleaned_value)
                        except ValueError:
                            pass
                    elif cleaned_value in ['-.-', '-']:
                        cleaned_value = None
                    cleaned_row[cleaned_key] = cleaned_value
            rnd_rows.append(cleaned_row)

        if not rnd_rows:
            raise HTTPException(status_code=400, detail="No data found in RND file")

    except HTTPException:
        # Let the deliberate 400 above propagate instead of being rewrapped
        # as a 500 by the generic handler below.
        raise
    except Exception as e:
        logger.error(f"Error reading RND file: {e}")
        raise HTTPException(status_code=500, detail=f"Error reading file: {str(e)}")

    # Apply time and date filtering (same logic as generate-report)
    def filter_rows(rows, filter_start_time, filter_end_time, filter_start_date, filter_end_date):
        """Filter rows by time-of-day window and/or date range; no filters -> rows unchanged."""
        if not filter_start_time and not filter_end_time and not filter_start_date and not filter_end_date:
            return rows

        filtered = []
        start_hour = start_minute = end_hour = end_minute = None

        # Parse the HH:MM filters; malformed values silently disable that filter.
        if filter_start_time:
            try:
                parts = filter_start_time.split(':')
                start_hour = int(parts[0])
                start_minute = int(parts[1]) if len(parts) > 1 else 0
            except (ValueError, IndexError):
                pass

        if filter_end_time:
            try:
                parts = filter_end_time.split(':')
                end_hour = int(parts[0])
                end_minute = int(parts[1]) if len(parts) > 1 else 0
            except (ValueError, IndexError):
                pass

        # Parse the YYYY-MM-DD filters; malformed values disable that filter.
        start_dt = end_dt = None
        if filter_start_date:
            try:
                start_dt = datetime.strptime(filter_start_date, '%Y-%m-%d').date()
            except ValueError:
                pass
        if filter_end_date:
            try:
                end_dt = datetime.strptime(filter_end_date, '%Y-%m-%d').date()
            except ValueError:
                pass

        for row in rows:
            start_time_str = row.get('Start Time', '')
            if not start_time_str:
                continue

            try:
                dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S')
                row_date = dt.date()
                row_hour = dt.hour
                row_minute = dt.minute

                # Date filtering
                if start_dt and row_date < start_dt:
                    continue
                if end_dt and row_date > end_dt:
                    continue

                # Time filtering (handle overnight ranges like 19:00-07:00)
                if start_hour is not None and end_hour is not None:
                    row_time_minutes = row_hour * 60 + row_minute
                    start_time_minutes = start_hour * 60 + start_minute
                    end_time_minutes = end_hour * 60 + end_minute

                    if start_time_minutes > end_time_minutes:
                        # Overnight range (e.g., 19:00-07:00)
                        if not (row_time_minutes >= start_time_minutes or row_time_minutes < end_time_minutes):
                            continue
                    else:
                        # Same day range (e.g., 07:00-19:00)
                        if not (start_time_minutes <= row_time_minutes < end_time_minutes):
                            continue

                filtered.append(row)
            except ValueError:
                # If we can't parse the time, include the row anyway
                filtered.append(row)

        return filtered

    original_count = len(rnd_rows)
    rnd_rows = filter_rows(rnd_rows, start_time, end_time, start_date, end_date)

    # Convert to spreadsheet data format (array of arrays) for jspreadsheet.
    spreadsheet_data = []
    for idx, row in enumerate(rnd_rows, 1):
        start_time_str = row.get('Start Time', '')
        date_str = ''
        time_str = ''
        if start_time_str:
            try:
                dt = datetime.strptime(start_time_str, '%Y/%m/%d %H:%M:%S')
                date_str = dt.strftime('%Y-%m-%d')
                time_str = dt.strftime('%H:%M:%S')
            except ValueError:
                # Keep the raw value visible rather than dropping the row.
                date_str = start_time_str
                time_str = ''

        # Column mapping from RND to report: Lmax(Main)->LAmax, LN1->LA01, LN2->LA10.
        lmax = row.get('Lmax(Main)', '')
        ln1 = row.get('LN1(Main)', '')
        ln2 = row.get('LN2(Main)', '')

        spreadsheet_data.append([
            idx,  # Test #
            date_str,
            time_str,
            lmax if lmax else '',
            ln1 if ln1 else '',
            ln2 if ln2 else '',
            ''  # Comments
        ])

    # Prepare context data
    final_project_name = project_name if project_name else (project.name if project else "")
    final_location = location_name if location_name else (location.name if location else "")

    # Fetch saved report templates for the dropdown. NOTE: kept in a local
    # named `saved_templates` — the original code assigned this to a local
    # named `templates`, shadowing the module-level Jinja2Templates instance
    # and breaking the TemplateResponse call below.
    saved_templates = db.query(ReportTemplate).all()

    return templates.TemplateResponse("report_preview.html", {
        "request": request,
        "project_id": project_id,
        "file_id": file_id,
        "project": project,
        "location": location,
        "file": file_record,
        "spreadsheet_data": spreadsheet_data,
        "report_title": report_title,
        "project_name": final_project_name,
        "client_name": client_name,
        "location_name": final_location,
        "start_time": start_time,
        "end_time": end_time,
        "start_date": start_date,
        "end_date": end_date,
        "original_count": original_count,
        "filtered_count": len(rnd_rows),
        "templates": saved_templates,
    })
|
||||
|
||||
|
||||
@router.post("/{project_id}/files/{file_id}/generate-from-preview")
async def generate_report_from_preview(
    project_id: str,
    file_id: str,
    data: dict,
    db: Session = Depends(get_db),
):
    """
    Generate an Excel report from edited spreadsheet data.
    Accepts the edited data from jspreadsheet and creates the final Excel file.

    Request body (``data``):
        - data: list of row arrays [test #, date, time, LAmax, LA01, LA10, comments]
        - report_title, project_name, client_name, location_name, time_filter: header strings

    Raises:
        HTTPException 404: file record not found.
        HTTPException 403: file belongs to a different project.
        HTTPException 400: no spreadsheet data provided.
        HTTPException 500: openpyxl not installed.
    """
    from backend.models import DataFile
    from pathlib import Path

    try:
        import openpyxl
        from openpyxl.chart import LineChart, Reference
        from openpyxl.styles import Font, Alignment, Border, Side, PatternFill
        from openpyxl.utils import get_column_letter
    except ImportError:
        raise HTTPException(status_code=500, detail="openpyxl is not installed")

    # Get the file record for filename generation
    file_record = db.query(DataFile).filter_by(id=file_id).first()
    if not file_record:
        raise HTTPException(status_code=404, detail="File not found")

    session = db.query(RecordingSession).filter_by(id=file_record.session_id).first()
    if not session or session.project_id != project_id:
        raise HTTPException(status_code=403, detail="File does not belong to this project")

    project = db.query(Project).filter_by(id=project_id).first()
    location = db.query(MonitoringLocation).filter_by(id=session.location_id).first() if session.location_id else None

    # Extract data from request (headers fall back to DB values)
    spreadsheet_data = data.get('data', [])
    report_title = data.get('report_title', 'Background Noise Study')
    project_name = data.get('project_name', project.name if project else '')
    client_name = data.get('client_name', '')
    location_name = data.get('location_name', location.name if location else '')
    time_filter = data.get('time_filter', '')

    if not spreadsheet_data:
        raise HTTPException(status_code=400, detail="No data provided")

    # Create Excel workbook
    wb = openpyxl.Workbook()
    ws = wb.active
    ws.title = "Sound Level Data"

    # Styles (match the generate-report endpoint's look)
    title_font = Font(bold=True, size=14)
    header_font = Font(bold=True, size=10)
    thin_border = Border(
        left=Side(style='thin'),
        right=Side(style='thin'),
        top=Side(style='thin'),
        bottom=Side(style='thin')
    )
    header_fill = PatternFill(start_color="DAEEF3", end_color="DAEEF3", fill_type="solid")

    # Row 1: Title
    final_title = f"{report_title} - {project_name}" if project_name else report_title
    ws['A1'] = final_title
    ws['A1'].font = title_font
    ws.merge_cells('A1:G1')

    # Row 2: Client
    if client_name:
        ws['A2'] = f"Client: {client_name}"
        ws['A2'].font = Font(italic=True, size=10)

    # Row 3: Location
    if location_name:
        ws['A3'] = location_name
        ws['A3'].font = Font(bold=True, size=11)

    # Row 4: Time filter info
    if time_filter:
        ws['A4'] = time_filter
        ws['A4'].font = Font(italic=True, size=9, color="666666")

    # Row 7: Headers
    headers = ['Test Increment #', 'Date', 'Time', 'LAmax (dBA)', 'LA01 (dBA)', 'LA10 (dBA)', 'Comments']
    for col, header in enumerate(headers, 1):
        cell = ws.cell(row=7, column=col, value=header)
        cell.font = header_font
        cell.border = thin_border
        cell.fill = header_fill
        cell.alignment = Alignment(horizontal='center')

    # Column widths
    column_widths = [16, 12, 10, 12, 12, 12, 40]
    for i, width in enumerate(column_widths, 1):
        ws.column_dimensions[get_column_letter(i)].width = width

    # Data rows (empty strings become blank cells)
    data_start_row = 8
    for idx, row_data in enumerate(spreadsheet_data):
        data_row = data_start_row + idx
        for col, value in enumerate(row_data, 1):
            cell = ws.cell(row=data_row, column=col, value=value if value != '' else None)
            cell.border = thin_border

    data_end_row = data_start_row + len(spreadsheet_data) - 1

    # Add chart if we have data (LAmax/LA01/LA10 = columns D-F)
    if len(spreadsheet_data) > 0:
        chart = LineChart()
        chart.title = f"{location_name or 'Sound Level Data'} - Background Noise Study"
        chart.style = 10
        chart.y_axis.title = "Sound Level (dBA)"
        chart.x_axis.title = "Test Increment"
        chart.height = 12
        chart.width = 20

        # min_row=7 so the header row supplies the series titles.
        data_ref = Reference(ws, min_col=4, min_row=7, max_col=6, max_row=data_end_row)
        categories = Reference(ws, min_col=1, min_row=data_start_row, max_row=data_end_row)

        chart.add_data(data_ref, titles_from_data=True)
        chart.set_categories(categories)

        # Fixed series colors: LAmax red, LA01 green, LA10 blue.
        if len(chart.series) >= 3:
            chart.series[0].graphicalProperties.line.solidFill = "FF0000"
            chart.series[1].graphicalProperties.line.solidFill = "00B050"
            chart.series[2].graphicalProperties.line.solidFill = "0070C0"

        ws.add_chart(chart, "I3")

    # Save to buffer
    output = io.BytesIO()
    wb.save(output)
    output.seek(0)

    # Generate a safe download filename from the source file and location.
    # NOTE(review): the original format strings here were garbled in this copy
    # (and as written the second assignment unconditionally overwrote the
    # location-prefixed name). Reconstructed as
    # "<location>_<source>_report.xlsx" — confirm against the
    # generate-report endpoint's naming convention.
    base_name = file_record.file_path.split('/')[-1].replace('.rnd', '')
    if location:
        base_name = f"{location.name}_{base_name}"
    filename = f"{base_name}_report.xlsx"
    filename = "".join(c for c in filename if c.isalnum() or c in ('_', '-', '.')).rstrip()

    return StreamingResponse(
        output,
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": f'attachment; filename="{filename}"'}
    )
|
||||
|
||||
|
||||
@router.get("/{project_id}/generate-combined-report")
|
||||
async def generate_combined_excel_report(
|
||||
project_id: str,
|
||||
|
||||
187
backend/routers/report_templates.py
Normal file
187
backend/routers/report_templates.py
Normal file
@@ -0,0 +1,187 @@
|
||||
"""
Report Templates Router

CRUD operations for report template management.
Templates store time filter presets and report configuration for reuse.
"""

from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import JSONResponse
from sqlalchemy.orm import Session
from datetime import datetime
from typing import Optional
import uuid

from backend.database import get_db
from backend.models import ReportTemplate

router = APIRouter(prefix="/api/report-templates", tags=["report-templates"])


def _template_to_dict(t):
    """Serialize a ReportTemplate row to a JSON-safe dict.

    Single source of truth for the response shape — every endpoint below
    uses this instead of hand-copying the field list.
    """
    return {
        "id": t.id,
        "name": t.name,
        "project_id": t.project_id,
        "report_title": t.report_title,
        "start_time": t.start_time,
        "end_time": t.end_time,
        "start_date": t.start_date,
        "end_date": t.end_date,
        "created_at": t.created_at.isoformat() if t.created_at else None,
        "updated_at": t.updated_at.isoformat() if t.updated_at else None,
    }


@router.get("")
async def list_templates(
    project_id: Optional[str] = None,
    db: Session = Depends(get_db),
):
    """
    List all report templates.
    Optionally filter by project_id (includes global templates with project_id=None).
    """
    query = db.query(ReportTemplate)

    if project_id:
        # Include global templates (project_id=None) AND project-specific templates
        query = query.filter(
            (ReportTemplate.project_id.is_(None)) | (ReportTemplate.project_id == project_id)
        )

    templates = query.order_by(ReportTemplate.name).all()

    return [_template_to_dict(t) for t in templates]


@router.post("")
async def create_template(
    data: dict,
    db: Session = Depends(get_db),
):
    """
    Create a new report template.

    Request body:
    - name: Template name (required)
    - project_id: Optional project ID for project-specific template
    - report_title: Default report title
    - start_time: Start time filter (HH:MM format)
    - end_time: End time filter (HH:MM format)
    - start_date: Start date filter (YYYY-MM-DD format)
    - end_date: End date filter (YYYY-MM-DD format)

    Raises:
        HTTPException 400: missing template name.
    """
    name = data.get("name")
    if not name:
        raise HTTPException(status_code=400, detail="Template name is required")

    template = ReportTemplate(
        id=str(uuid.uuid4()),
        name=name,
        project_id=data.get("project_id"),
        report_title=data.get("report_title", "Background Noise Study"),
        start_time=data.get("start_time"),
        end_time=data.get("end_time"),
        start_date=data.get("start_date"),
        end_date=data.get("end_date"),
    )

    db.add(template)
    db.commit()
    db.refresh(template)

    return _template_to_dict(template)


@router.get("/{template_id}")
async def get_template(
    template_id: str,
    db: Session = Depends(get_db),
):
    """Get a specific report template by ID. Raises 404 if not found."""
    template = db.query(ReportTemplate).filter_by(id=template_id).first()
    if not template:
        raise HTTPException(status_code=404, detail="Template not found")

    return _template_to_dict(template)


@router.put("/{template_id}")
async def update_template(
    template_id: str,
    data: dict,
    db: Session = Depends(get_db),
):
    """Update an existing report template.

    Partial update: only keys present in the body are changed (a key whose
    value is None explicitly clears that field). Raises 404 if not found.
    """
    template = db.query(ReportTemplate).filter_by(id=template_id).first()
    if not template:
        raise HTTPException(status_code=404, detail="Template not found")

    # Update fields if provided
    if "name" in data:
        template.name = data["name"]
    if "project_id" in data:
        template.project_id = data["project_id"]
    if "report_title" in data:
        template.report_title = data["report_title"]
    if "start_time" in data:
        template.start_time = data["start_time"]
    if "end_time" in data:
        template.end_time = data["end_time"]
    if "start_date" in data:
        template.start_date = data["start_date"]
    if "end_date" in data:
        template.end_date = data["end_date"]

    template.updated_at = datetime.utcnow()
    db.commit()
    db.refresh(template)

    return _template_to_dict(template)


@router.delete("/{template_id}")
async def delete_template(
    template_id: str,
    db: Session = Depends(get_db),
):
    """Delete a report template. Raises 404 if not found."""
    template = db.query(ReportTemplate).filter_by(id=template_id).first()
    if not template:
        raise HTTPException(status_code=404, detail="Template not found")

    db.delete(template)
    db.commit()

    return JSONResponse({"status": "success", "message": "Template deleted"})
|
||||
Reference in New Issue
Block a user