242 lines
8.5 KiB
Python
242 lines
8.5 KiB
Python
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
|
|
from fastapi.responses import StreamingResponse
|
|
from sqlalchemy.orm import Session
|
|
from datetime import datetime, date
|
|
import csv
|
|
import io
|
|
|
|
from backend.database import get_db
|
|
from backend.models import RosterUnit, Emitter, IgnoredUnit
|
|
|
|
# Settings/admin endpoints: CSV export/import of the roster plus table maintenance.
router = APIRouter(prefix="/api/settings", tags=["settings"])
|
|
|
|
|
|
@router.get("/export-csv")
def export_roster_csv(db: Session = Depends(get_db)):
    """Export all roster units as a downloadable UTF-8 CSV.

    NULL text columns are serialized as empty strings, booleans as
    'true'/'false', and dates as YYYY-MM-DD, matching what the
    import-csv-replace endpoint expects back.
    """
    units = db.query(RosterUnit).all()

    # Build the CSV in memory; the roster is small enough to buffer whole.
    output = io.StringIO()
    fieldnames = [
        'unit_id', 'unit_type', 'device_type', 'deployed', 'retired',
        'note', 'project_id', 'location', 'address', 'coordinates',
        'last_calibrated', 'next_calibration_due', 'deployed_with_modem_id',
        'ip_address', 'phone_number', 'hardware_model'
    ]

    writer = csv.DictWriter(output, fieldnames=fieldnames)
    writer.writeheader()

    for unit in units:
        writer.writerow({
            'unit_id': unit.id,
            'unit_type': unit.unit_type or '',
            'device_type': unit.device_type or 'seismograph',
            'deployed': 'true' if unit.deployed else 'false',
            'retired': 'true' if unit.retired else 'false',
            'note': unit.note or '',
            'project_id': unit.project_id or '',
            'location': unit.location or '',
            'address': unit.address or '',
            'coordinates': unit.coordinates or '',
            'last_calibrated': unit.last_calibrated.strftime('%Y-%m-%d') if unit.last_calibrated else '',
            'next_calibration_due': unit.next_calibration_due.strftime('%Y-%m-%d') if unit.next_calibration_due else '',
            'deployed_with_modem_id': unit.deployed_with_modem_id or '',
            'ip_address': unit.ip_address or '',
            'phone_number': unit.phone_number or '',
            'hardware_model': unit.hardware_model or ''
        })

    output.seek(0)
    filename = f"roster_export_{date.today().isoformat()}.csv"

    # BUG FIX: the header previously contained the literal text "(unknown)"
    # and the computed filename above was never used. Quote the value per
    # RFC 6266 so the date's hyphens can't be misparsed by clients.
    return StreamingResponse(
        io.BytesIO(output.getvalue().encode('utf-8')),
        media_type="text/csv",
        headers={"Content-Disposition": f'attachment; filename="{filename}"'}
    )
|
|
|
|
|
|
@router.get("/stats")
def get_table_stats(db: Session = Depends(get_db)):
    """Return row counts for the roster, emitters, and ignored tables."""
    counts = {
        "roster": db.query(RosterUnit).count(),
        "emitters": db.query(Emitter).count(),
        "ignored": db.query(IgnoredUnit).count(),
    }
    # Grand total across all three tables, for the settings dashboard.
    counts["total"] = counts["roster"] + counts["emitters"] + counts["ignored"]
    return counts
|
|
|
|
|
|
@router.get("/roster-units")
def get_all_roster_units(db: Session = Depends(get_db)):
    """Return every roster unit, ordered by id, for the management table."""

    def _serialize(unit):
        # NULL text columns become "" for the UI; NULL dates stay None.
        return {
            "id": unit.id,
            "device_type": unit.device_type or "seismograph",
            "unit_type": unit.unit_type or "series3",
            "deployed": unit.deployed,
            "retired": unit.retired,
            "note": unit.note or "",
            "project_id": unit.project_id or "",
            "location": unit.location or "",
            "address": unit.address or "",
            "coordinates": unit.coordinates or "",
            "last_calibrated": unit.last_calibrated.isoformat() if unit.last_calibrated else None,
            "next_calibration_due": unit.next_calibration_due.isoformat() if unit.next_calibration_due else None,
            "deployed_with_modem_id": unit.deployed_with_modem_id or "",
            "ip_address": unit.ip_address or "",
            "phone_number": unit.phone_number or "",
            "hardware_model": unit.hardware_model or "",
            "last_updated": unit.last_updated.isoformat() if unit.last_updated else None,
        }

    rows = db.query(RosterUnit).order_by(RosterUnit.id).all()
    return [_serialize(unit) for unit in rows]
|
|
|
|
|
|
def parse_date(date_str):
    """Parse a 'YYYY-MM-DD' string into a date; None for blank/invalid input."""
    cleaned = (date_str or "").strip()
    if not cleaned:
        return None
    try:
        parsed = datetime.strptime(cleaned, "%Y-%m-%d")
    except ValueError:
        # Malformed dates are tolerated and imported as NULL.
        return None
    return parsed.date()
|
|
|
|
|
|
@router.post("/import-csv-replace")
async def import_csv_replace(
    file: UploadFile = File(...),
    db: Session = Depends(get_db)
):
    """
    Replace all roster data with CSV import (atomic transaction).

    Every row is validated and parsed first; only then is the roster table
    cleared and repopulated inside one transaction, so a bad file never
    leaves the roster partially imported.

    Raises:
        HTTPException 400: missing/non-CSV filename, non-UTF-8 bytes, or a
            row without the required unit_id field.
        HTTPException 500: database failure (transaction is rolled back).
    """
    # BUG FIX: filename may be None (AttributeError -> 500 before), and the
    # old check rejected valid uploads named e.g. "roster.CSV".
    if not file.filename or not file.filename.lower().endswith('.csv'):
        raise HTTPException(status_code=400, detail="File must be a CSV")

    # Read and decode the upload.
    contents = await file.read()
    try:
        csv_text = contents.decode('utf-8')
    except UnicodeDecodeError:
        # BUG FIX: a bad encoding is a client error, not an unhandled 500.
        raise HTTPException(status_code=400, detail="File must be UTF-8 encoded text")
    csv_reader = csv.DictReader(io.StringIO(csv_text))

    # Parse all rows FIRST (fail fast before deletion).
    parsed_units = []
    # start=2: row 1 of the file is the header line.
    for row_num, row in enumerate(csv_reader, start=2):
        # BUG FIX: DictReader fills short rows with None, so guard with
        # `or ''` before calling str methods (previously an AttributeError).
        unit_id = (row.get('unit_id') or '').strip()
        if not unit_id:
            raise HTTPException(
                status_code=400,
                detail=f"Row {row_num}: Missing required field unit_id"
            )

        # Invalid dates are tolerated: parse_date returns None -> NULL column.
        last_cal_date = parse_date(row.get('last_calibrated'))
        next_cal_date = parse_date(row.get('next_calibration_due'))

        parsed_units.append({
            'id': unit_id,
            'unit_type': row.get('unit_type', 'series3'),
            'device_type': row.get('device_type', 'seismograph'),
            'deployed': (row.get('deployed') or '').lower() in ('true', '1', 'yes'),
            'retired': (row.get('retired') or '').lower() in ('true', '1', 'yes'),
            'note': row.get('note', ''),
            'project_id': row.get('project_id') or None,
            'location': row.get('location') or None,
            'address': row.get('address') or None,
            'coordinates': row.get('coordinates') or None,
            'last_calibrated': last_cal_date,
            'next_calibration_due': next_cal_date,
            'deployed_with_modem_id': row.get('deployed_with_modem_id') or None,
            'ip_address': row.get('ip_address') or None,
            'phone_number': row.get('phone_number') or None,
            'hardware_model': row.get('hardware_model') or None,
        })

    # Atomic transaction: delete all, then insert all.
    try:
        deleted_count = db.query(RosterUnit).delete()

        for unit_data in parsed_units:
            # NOTE(review): utcnow() is naive; presumably last_updated is a
            # naive-UTC column elsewhere too — confirm before switching to
            # timezone-aware datetimes.
            new_unit = RosterUnit(**unit_data, last_updated=datetime.utcnow())
            db.add(new_unit)

        db.commit()

        return {
            "message": "Roster replaced successfully",
            "deleted": deleted_count,
            "added": len(parsed_units)
        }

    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Import failed: {str(e)}")
|
|
|
|
|
|
@router.post("/clear-all")
def clear_all_data(db: Session = Depends(get_db)):
    """Delete every row from the roster, emitters, and ignored tables."""
    try:
        # Dict literal evaluates in order, preserving the original delete order.
        deleted = {
            "roster": db.query(RosterUnit).delete(),
            "emitters": db.query(Emitter).delete(),
            "ignored": db.query(IgnoredUnit).delete(),
        }
        deleted["total"] = deleted["roster"] + deleted["emitters"] + deleted["ignored"]
        db.commit()
        return {"message": "All data cleared", "deleted": deleted}
    except Exception as exc:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Clear failed: {str(exc)}")
|
|
|
|
|
|
@router.post("/clear-roster")
def clear_roster(db: Session = Depends(get_db)):
    """Delete every row from the roster table only."""
    try:
        deleted_rows = db.query(RosterUnit).delete()
        db.commit()
    except Exception as exc:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Clear failed: {str(exc)}")
    return {"message": "Roster cleared", "deleted": deleted_rows}
|
|
|
|
|
|
@router.post("/clear-emitters")
def clear_emitters(db: Session = Depends(get_db)):
    """Delete every row from the emitters table only."""
    try:
        deleted_rows = db.query(Emitter).delete()
        db.commit()
    except Exception as exc:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Clear failed: {str(exc)}")
    return {"message": "Emitters cleared", "deleted": deleted_rows}
|
|
|
|
|
|
@router.post("/clear-ignored")
def clear_ignored(db: Session = Depends(get_db)):
    """Delete every row from the ignored-units table only."""
    try:
        deleted_rows = db.query(IgnoredUnit).delete()
        db.commit()
    except Exception as exc:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Clear failed: {str(exc)}")
    return {"message": "Ignored units cleared", "deleted": deleted_rows}
|