from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
from fastapi.responses import StreamingResponse, FileResponse
from sqlalchemy.orm import Session
from datetime import datetime, date
from pydantic import BaseModel
from typing import Optional
import csv
import io
import shutil
from pathlib import Path

from backend.database import get_db
from backend.models import RosterUnit, Emitter, IgnoredUnit, UserPreferences
from backend.services.database_backup import DatabaseBackupService

router = APIRouter(prefix="/api/settings", tags=["settings"])
@router.get("/export-csv")
|
|
def export_roster_csv(db: Session = Depends(get_db)):
|
|
"""Export all roster units to CSV"""
|
|
units = db.query(RosterUnit).all()
|
|
|
|
# Create CSV in memory
|
|
output = io.StringIO()
|
|
fieldnames = [
|
|
'unit_id', 'unit_type', 'device_type', 'deployed', 'retired',
|
|
'note', 'project_id', 'location', 'address', 'coordinates',
|
|
'last_calibrated', 'next_calibration_due', 'deployed_with_modem_id',
|
|
'ip_address', 'phone_number', 'hardware_model'
|
|
]
|
|
|
|
writer = csv.DictWriter(output, fieldnames=fieldnames)
|
|
writer.writeheader()
|
|
|
|
for unit in units:
|
|
writer.writerow({
|
|
'unit_id': unit.id,
|
|
'unit_type': unit.unit_type or '',
|
|
'device_type': unit.device_type or 'seismograph',
|
|
'deployed': 'true' if unit.deployed else 'false',
|
|
'retired': 'true' if unit.retired else 'false',
|
|
'note': unit.note or '',
|
|
'project_id': unit.project_id or '',
|
|
'location': unit.location or '',
|
|
'address': unit.address or '',
|
|
'coordinates': unit.coordinates or '',
|
|
'last_calibrated': unit.last_calibrated.strftime('%Y-%m-%d') if unit.last_calibrated else '',
|
|
'next_calibration_due': unit.next_calibration_due.strftime('%Y-%m-%d') if unit.next_calibration_due else '',
|
|
'deployed_with_modem_id': unit.deployed_with_modem_id or '',
|
|
'ip_address': unit.ip_address or '',
|
|
'phone_number': unit.phone_number or '',
|
|
'hardware_model': unit.hardware_model or ''
|
|
})
|
|
|
|
output.seek(0)
|
|
filename = f"roster_export_{date.today().isoformat()}.csv"
|
|
|
|
return StreamingResponse(
|
|
io.BytesIO(output.getvalue().encode('utf-8')),
|
|
media_type="text/csv",
|
|
headers={"Content-Disposition": f"attachment; filename={filename}"}
|
|
)
|
|
|
|
|
|
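# Usage sketch for the export above: the response is a CSV attachment whose name
# comes from the Content-Disposition header. Host and port are assumptions for a
# local dev server, not part of this module.
#   curl -OJ http://localhost:8000/api/settings/export-csv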
@router.get("/stats")
|
|
def get_table_stats(db: Session = Depends(get_db)):
|
|
"""Get counts for all tables"""
|
|
roster_count = db.query(RosterUnit).count()
|
|
emitters_count = db.query(Emitter).count()
|
|
ignored_count = db.query(IgnoredUnit).count()
|
|
|
|
return {
|
|
"roster": roster_count,
|
|
"emitters": emitters_count,
|
|
"ignored": ignored_count,
|
|
"total": roster_count + emitters_count + ignored_count
|
|
}
|
|
|
|
|
|
@router.get("/roster-units")
|
|
def get_all_roster_units(db: Session = Depends(get_db)):
|
|
"""Get all roster units for management table"""
|
|
units = db.query(RosterUnit).order_by(RosterUnit.id).all()
|
|
|
|
return [{
|
|
"id": unit.id,
|
|
"device_type": unit.device_type or "seismograph",
|
|
"unit_type": unit.unit_type or "series3",
|
|
"deployed": unit.deployed,
|
|
"retired": unit.retired,
|
|
"note": unit.note or "",
|
|
"project_id": unit.project_id or "",
|
|
"location": unit.location or "",
|
|
"address": unit.address or "",
|
|
"coordinates": unit.coordinates or "",
|
|
"last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else None,
|
|
"next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else None,
|
|
"deployed_with_modem_id": unit.deployed_with_modem_id or "",
|
|
"ip_address": unit.ip_address or "",
|
|
"phone_number": unit.phone_number or "",
|
|
"hardware_model": unit.hardware_model or "",
|
|
"slm_host": unit.slm_host or "",
|
|
"slm_tcp_port": unit.slm_tcp_port,
|
|
"slm_model": unit.slm_model or "",
|
|
"slm_serial_number": unit.slm_serial_number or "",
|
|
"slm_frequency_weighting": unit.slm_frequency_weighting or "",
|
|
"slm_time_weighting": unit.slm_time_weighting or "",
|
|
"slm_measurement_range": unit.slm_measurement_range or "",
|
|
"slm_last_check": unit.slm_last_check.isoformat() if unit.slm_last_check else None,
|
|
"last_updated": unit.last_updated.isoformat() if unit.last_updated else None
|
|
} for unit in units]
|
|
|
|
|
|


def parse_date(date_str):
    """Helper function to parse date strings"""
    if not date_str or not date_str.strip():
        return None
    try:
        return datetime.strptime(date_str.strip(), "%Y-%m-%d").date()
    except ValueError:
        return None
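# Behavior sketch: parse_date accepts ISO dates only and degrades to None.
#   parse_date("2024-03-01")  -> date(2024, 3, 1)
#   parse_date("03/01/2024")  -> None (not YYYY-MM-DD)
#   parse_date("")            -> None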
@router.post("/import-csv-replace")
|
|
async def import_csv_replace(
|
|
file: UploadFile = File(...),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Replace all roster data with CSV import (atomic transaction).
|
|
Clears roster table first, then imports all rows from CSV.
|
|
"""
|
|
|
|
if not file.filename.endswith('.csv'):
|
|
raise HTTPException(status_code=400, detail="File must be a CSV")
|
|
|
|
# Read and parse CSV
|
|
contents = await file.read()
|
|
csv_text = contents.decode('utf-8')
|
|
csv_reader = csv.DictReader(io.StringIO(csv_text))
|
|
|
|
# Parse all rows FIRST (fail fast before deletion)
|
|
parsed_units = []
|
|
for row_num, row in enumerate(csv_reader, start=2):
|
|
unit_id = row.get('unit_id', '').strip()
|
|
if not unit_id:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Row {row_num}: Missing required field unit_id"
|
|
)
|
|
|
|
# Parse and validate dates
|
|
last_cal_date = parse_date(row.get('last_calibrated'))
|
|
next_cal_date = parse_date(row.get('next_calibration_due'))
|
|
|
|
parsed_units.append({
|
|
'id': unit_id,
|
|
'unit_type': row.get('unit_type', 'series3'),
|
|
'device_type': row.get('device_type', 'seismograph'),
|
|
'deployed': row.get('deployed', '').lower() in ('true', '1', 'yes'),
|
|
'retired': row.get('retired', '').lower() in ('true', '1', 'yes'),
|
|
'note': row.get('note', ''),
|
|
'project_id': row.get('project_id') or None,
|
|
'location': row.get('location') or None,
|
|
'address': row.get('address') or None,
|
|
'coordinates': row.get('coordinates') or None,
|
|
'last_calibrated': last_cal_date,
|
|
'next_calibration_due': next_cal_date,
|
|
'deployed_with_modem_id': row.get('deployed_with_modem_id') or None,
|
|
'ip_address': row.get('ip_address') or None,
|
|
'phone_number': row.get('phone_number') or None,
|
|
'hardware_model': row.get('hardware_model') or None,
|
|
})
|
|
|
|
# Atomic transaction: delete all, then insert all
|
|
try:
|
|
deleted_count = db.query(RosterUnit).delete()
|
|
|
|
for unit_data in parsed_units:
|
|
new_unit = RosterUnit(**unit_data, last_updated=datetime.utcnow())
|
|
db.add(new_unit)
|
|
|
|
db.commit()
|
|
|
|
return {
|
|
"message": "Roster replaced successfully",
|
|
"deleted": deleted_count,
|
|
"added": len(parsed_units)
|
|
}
|
|
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Import failed: {str(e)}")
|
|
|
|
|
|
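# Expected CSV layout for /import-csv-replace (sketch, mirroring the export
# fieldnames above): a header row, then one row per unit. Only unit_id is
# required; booleans accept true/1/yes; dates must be YYYY-MM-DD. The example
# row is hypothetical, and the curl host/port are assumptions.
#
#   unit_id,unit_type,device_type,deployed,retired,note,project_id,location,address,coordinates,last_calibrated,next_calibration_due,deployed_with_modem_id,ip_address,phone_number,hardware_model
#   BE12345,series3,seismograph,true,false,Spare unit,,,,,2024-01-15,2025-01-15,,,,
#
#   curl -X POST -F "file=@roster.csv" http://localhost:8000/api/settings/import-csv-replace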
@router.post("/clear-all")
|
|
def clear_all_data(db: Session = Depends(get_db)):
|
|
"""Clear all tables (roster, emitters, ignored)"""
|
|
try:
|
|
roster_count = db.query(RosterUnit).delete()
|
|
emitters_count = db.query(Emitter).delete()
|
|
ignored_count = db.query(IgnoredUnit).delete()
|
|
|
|
db.commit()
|
|
|
|
return {
|
|
"message": "All data cleared",
|
|
"deleted": {
|
|
"roster": roster_count,
|
|
"emitters": emitters_count,
|
|
"ignored": ignored_count,
|
|
"total": roster_count + emitters_count + ignored_count
|
|
}
|
|
}
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
|
|
|
|
|
@router.post("/clear-roster")
|
|
def clear_roster(db: Session = Depends(get_db)):
|
|
"""Clear roster table only"""
|
|
try:
|
|
count = db.query(RosterUnit).delete()
|
|
db.commit()
|
|
return {"message": "Roster cleared", "deleted": count}
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
|
|
|
|
|
@router.post("/clear-emitters")
|
|
def clear_emitters(db: Session = Depends(get_db)):
|
|
"""Clear emitters table only"""
|
|
try:
|
|
count = db.query(Emitter).delete()
|
|
db.commit()
|
|
return {"message": "Emitters cleared", "deleted": count}
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
|
|
|
|
|
@router.post("/clear-ignored")
|
|
def clear_ignored(db: Session = Depends(get_db)):
|
|
"""Clear ignored units table only"""
|
|
try:
|
|
count = db.query(IgnoredUnit).delete()
|
|
db.commit()
|
|
return {"message": "Ignored units cleared", "deleted": count}
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Clear failed: {str(e)}")
|
|
|
|
|
|


# User Preferences Endpoints

class PreferencesUpdate(BaseModel):
    """Schema for updating user preferences (all fields optional)"""
    timezone: Optional[str] = None
    theme: Optional[str] = None
    auto_refresh_interval: Optional[int] = None
    date_format: Optional[str] = None
    table_rows_per_page: Optional[int] = None
    calibration_interval_days: Optional[int] = None
    calibration_warning_days: Optional[int] = None
    status_ok_threshold_hours: Optional[int] = None
    status_pending_threshold_hours: Optional[int] = None
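# Partial-update sketch for PUT /api/settings/preferences: only the fields sent
# are applied (exclude_unset below); everything else keeps its stored value.
# The values here are illustrative, not defaults from this codebase.
#   {"theme": "dark", "auto_refresh_interval": 60}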
@router.get("/preferences")
|
|
def get_preferences(db: Session = Depends(get_db)):
|
|
"""
|
|
Get user preferences. Creates default preferences if none exist.
|
|
"""
|
|
prefs = db.query(UserPreferences).filter(UserPreferences.id == 1).first()
|
|
|
|
if not prefs:
|
|
# Create default preferences
|
|
prefs = UserPreferences(id=1)
|
|
db.add(prefs)
|
|
db.commit()
|
|
db.refresh(prefs)
|
|
|
|
return {
|
|
"timezone": prefs.timezone,
|
|
"theme": prefs.theme,
|
|
"auto_refresh_interval": prefs.auto_refresh_interval,
|
|
"date_format": prefs.date_format,
|
|
"table_rows_per_page": prefs.table_rows_per_page,
|
|
"calibration_interval_days": prefs.calibration_interval_days,
|
|
"calibration_warning_days": prefs.calibration_warning_days,
|
|
"status_ok_threshold_hours": prefs.status_ok_threshold_hours,
|
|
"status_pending_threshold_hours": prefs.status_pending_threshold_hours,
|
|
"updated_at": prefs.updated_at.isoformat() if prefs.updated_at else None
|
|
}
|
|
|
|
|
|
@router.put("/preferences")
|
|
def update_preferences(
|
|
updates: PreferencesUpdate,
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Update user preferences. Accepts partial updates.
|
|
Creates default preferences if none exist.
|
|
"""
|
|
prefs = db.query(UserPreferences).filter(UserPreferences.id == 1).first()
|
|
|
|
if not prefs:
|
|
# Create default preferences
|
|
prefs = UserPreferences(id=1)
|
|
db.add(prefs)
|
|
|
|
# Update only provided fields
|
|
update_data = updates.dict(exclude_unset=True)
|
|
for field, value in update_data.items():
|
|
setattr(prefs, field, value)
|
|
|
|
prefs.updated_at = datetime.utcnow()
|
|
|
|
db.commit()
|
|
db.refresh(prefs)
|
|
|
|
return {
|
|
"message": "Preferences updated successfully",
|
|
"timezone": prefs.timezone,
|
|
"theme": prefs.theme,
|
|
"auto_refresh_interval": prefs.auto_refresh_interval,
|
|
"date_format": prefs.date_format,
|
|
"table_rows_per_page": prefs.table_rows_per_page,
|
|
"calibration_interval_days": prefs.calibration_interval_days,
|
|
"calibration_warning_days": prefs.calibration_warning_days,
|
|
"status_ok_threshold_hours": prefs.status_ok_threshold_hours,
|
|
"status_pending_threshold_hours": prefs.status_pending_threshold_hours,
|
|
"updated_at": prefs.updated_at.isoformat() if prefs.updated_at else None
|
|
}
|
|
|
|
|
|


# Database Management Endpoints

backup_service = DatabaseBackupService()
@router.get("/database/stats")
|
|
def get_database_stats():
|
|
"""Get current database statistics"""
|
|
try:
|
|
stats = backup_service.get_database_stats()
|
|
return stats
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Failed to get database stats: {str(e)}")
|
|
|
|
|
|
@router.post("/database/snapshot")
|
|
def create_database_snapshot(description: Optional[str] = None):
|
|
"""Create a full database snapshot"""
|
|
try:
|
|
snapshot = backup_service.create_snapshot(description=description)
|
|
return {
|
|
"message": "Snapshot created successfully",
|
|
"snapshot": snapshot
|
|
}
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Snapshot creation failed: {str(e)}")
|
|
|
|
|
|
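# Note: because `description` is a plain Optional[str] parameter, FastAPI reads
# it from the query string rather than the request body. Hedged example (host,
# port, and description value are assumptions):
#   curl -X POST "http://localhost:8000/api/settings/database/snapshot?description=pre-upgrade"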
@router.get("/database/snapshots")
|
|
def list_database_snapshots():
|
|
"""List all available database snapshots"""
|
|
try:
|
|
snapshots = backup_service.list_snapshots()
|
|
return {
|
|
"snapshots": snapshots,
|
|
"count": len(snapshots)
|
|
}
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Failed to list snapshots: {str(e)}")
|
|
|
|
|
|
@router.get("/database/snapshot/{filename}")
|
|
def download_snapshot(filename: str):
|
|
"""Download a specific snapshot file"""
|
|
try:
|
|
snapshot_path = backup_service.download_snapshot(filename)
|
|
return FileResponse(
|
|
path=str(snapshot_path),
|
|
filename=filename,
|
|
media_type="application/x-sqlite3"
|
|
)
|
|
except FileNotFoundError:
|
|
raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Download failed: {str(e)}")
|
|
|
|
|
|
@router.delete("/database/snapshot/{filename}")
|
|
def delete_database_snapshot(filename: str):
|
|
"""Delete a specific snapshot"""
|
|
try:
|
|
backup_service.delete_snapshot(filename)
|
|
return {
|
|
"message": f"Snapshot {filename} deleted successfully",
|
|
"filename": filename
|
|
}
|
|
except FileNotFoundError:
|
|
raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Delete failed: {str(e)}")
|
|
|
|
|
|


class RestoreRequest(BaseModel):
    """Schema for restore request"""
    filename: str
    create_backup: bool = True
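# Request-body sketch for POST /api/settings/database/restore. The filename is
# hypothetical; create_backup defaults to true, which asks the backup service to
# snapshot the current database before restoring.
#   {"filename": "snapshot_uploaded_20240101_120000.db", "create_backup": true}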
@router.post("/database/restore")
|
|
def restore_database(request: RestoreRequest, db: Session = Depends(get_db)):
|
|
"""Restore database from a snapshot"""
|
|
try:
|
|
# Close the database connection before restoring
|
|
db.close()
|
|
|
|
result = backup_service.restore_snapshot(
|
|
filename=request.filename,
|
|
create_backup_before_restore=request.create_backup
|
|
)
|
|
|
|
return result
|
|
except FileNotFoundError:
|
|
raise HTTPException(status_code=404, detail=f"Snapshot {request.filename} not found")
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Restore failed: {str(e)}")
|
|
|
|
|
|
@router.post("/database/upload-snapshot")
|
|
async def upload_snapshot(file: UploadFile = File(...)):
|
|
"""Upload a snapshot file to the backups directory"""
|
|
if not file.filename.endswith('.db'):
|
|
raise HTTPException(status_code=400, detail="File must be a .db file")
|
|
|
|
try:
|
|
# Save uploaded file to backups directory
|
|
backups_dir = Path("./data/backups")
|
|
backups_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
|
|
uploaded_filename = f"snapshot_uploaded_{timestamp}.db"
|
|
file_path = backups_dir / uploaded_filename
|
|
|
|
# Save file
|
|
with open(file_path, "wb") as buffer:
|
|
shutil.copyfileobj(file.file, buffer)
|
|
|
|
# Create metadata
|
|
metadata = {
|
|
"filename": uploaded_filename,
|
|
"created_at": timestamp,
|
|
"created_at_iso": datetime.utcnow().isoformat(),
|
|
"description": f"Uploaded: {file.filename}",
|
|
"size_bytes": file_path.stat().st_size,
|
|
"size_mb": round(file_path.stat().st_size / (1024 * 1024), 2),
|
|
"type": "uploaded"
|
|
}
|
|
|
|
metadata_path = backups_dir / f"{uploaded_filename}.meta.json"
|
|
import json
|
|
with open(metadata_path, 'w') as f:
|
|
json.dump(metadata, f, indent=2)
|
|
|
|
return {
|
|
"message": "Snapshot uploaded successfully",
|
|
"snapshot": metadata
|
|
}
|
|
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
|
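# Upload sketch: the endpoint expects a multipart form field named "file" holding
# a .db snapshot; host, port, and the local file path are assumptions.
#   curl -X POST -F "file=@my_backup.db" http://localhost:8000/api/settings/database/upload-snapshot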