db management system added
146 backend/routers/activity.py Normal file
@@ -0,0 +1,146 @@
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from sqlalchemy import desc
from pathlib import Path
from datetime import datetime, timedelta, timezone
from typing import Optional

from backend.database import get_db
from backend.models import UnitHistory, Emitter, RosterUnit

router = APIRouter(prefix="/api", tags=["activity"])

PHOTOS_BASE_DIR = Path("data/photos")

@router.get("/recent-activity")
def get_recent_activity(limit: int = 20, db: Session = Depends(get_db)):
    """
    Get the recent activity feed combining unit history changes and photo uploads.
    Returns a unified timeline of events sorted by timestamp (newest first).
    """
    activities = []

    # Get recent history entries (fetch extra so they can be interleaved with photos)
    history_entries = db.query(UnitHistory)\
        .order_by(desc(UnitHistory.changed_at))\
        .limit(limit * 2)\
        .all()

    for entry in history_entries:
        activity = {
            "type": "history",
            "timestamp": entry.changed_at.isoformat(),
            "timestamp_unix": entry.changed_at.timestamp(),
            "unit_id": entry.unit_id,
            "change_type": entry.change_type,
            "field_name": entry.field_name,
            "old_value": entry.old_value,
            "new_value": entry.new_value,
            "source": entry.source,
            "notes": entry.notes
        }
        activities.append(activity)

    # Get recent photos from the per-unit photo directories
    if PHOTOS_BASE_DIR.exists():
        image_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp"}
        photo_activities = []

        for unit_dir in PHOTOS_BASE_DIR.iterdir():
            if not unit_dir.is_dir():
                continue

            unit_id = unit_dir.name

            for file_path in unit_dir.iterdir():
                if file_path.is_file() and file_path.suffix.lower() in image_extensions:
                    modified_time = file_path.stat().st_mtime
                    photo_activities.append({
                        "type": "photo",
                        # Render the mtime as a UTC datetime so it lines up with the DB timestamps
                        "timestamp": datetime.fromtimestamp(modified_time, tz=timezone.utc).isoformat(),
                        "timestamp_unix": modified_time,
                        "unit_id": unit_id,
                        "filename": file_path.name,
                        "photo_url": f"/api/unit/{unit_id}/photo/{file_path.name}"
                    })

        activities.extend(photo_activities)

    # Sort all activities by timestamp (newest first), then trim to the requested count
    activities.sort(key=lambda x: x["timestamp_unix"], reverse=True)
    activities = activities[:limit]

    return {
        "activities": activities,
        "total": len(activities)
    }

@router.get("/recent-callins")
def get_recent_callins(hours: int = 6, limit: Optional[int] = None, db: Session = Depends(get_db)):
    """
    Get recent unit call-ins (units that have reported recently).
    Returns units sorted by most recent last_seen timestamp.

    Args:
        hours: Look back this many hours (default: 6)
        limit: Maximum number of results (default: None = all)
    """
    # Calculate the time threshold
    time_threshold = datetime.now(timezone.utc) - timedelta(hours=hours)

    # Query emitters with recent activity; roster info is fetched separately below
    recent_emitters = db.query(Emitter)\
        .filter(Emitter.last_seen >= time_threshold)\
        .order_by(desc(Emitter.last_seen))\
        .all()

    # Get roster info for all units in one query
    roster_dict = {r.id: r for r in db.query(RosterUnit).all()}

    call_ins = []
    for emitter in recent_emitters:
        roster_unit = roster_dict.get(emitter.id)

        # Calculate time since last seen (treat naive DB timestamps as UTC)
        last_seen_utc = emitter.last_seen.replace(tzinfo=timezone.utc) if emitter.last_seen.tzinfo is None else emitter.last_seen
        time_diff = datetime.now(timezone.utc) - last_seen_utc

        # Format a human-readable "time ago" string
        if time_diff.total_seconds() < 60:
            time_ago = "just now"
        elif time_diff.total_seconds() < 3600:
            minutes = int(time_diff.total_seconds() / 60)
            time_ago = f"{minutes}m ago"
        else:
            hours_ago = time_diff.total_seconds() / 3600
            if hours_ago < 24:
                time_ago = f"{int(hours_ago)}h {int((hours_ago % 1) * 60)}m ago"
            else:
                days = int(hours_ago / 24)
                time_ago = f"{days}d ago"

        call_in = {
            "unit_id": emitter.id,
            "last_seen": emitter.last_seen.isoformat(),
            "time_ago": time_ago,
            "status": emitter.status,
            "device_type": roster_unit.device_type if roster_unit else "seismograph",
            "deployed": roster_unit.deployed if roster_unit else False,
            "note": roster_unit.note if roster_unit and roster_unit.note else "",
            "location": roster_unit.address if roster_unit and roster_unit.address else (roster_unit.location if roster_unit else "")
        }
        call_ins.append(call_in)

    # Apply limit if specified (use "is not None" so limit=0 returns an empty list)
    if limit is not None:
        call_ins = call_ins[:limit]

    return {
        "call_ins": call_ins,
        "total": len(call_ins),
        "hours": hours,
        "time_threshold": time_threshold.isoformat()
    }
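
A quick sanity check of the two new endpoints. This is a minimal sketch, not part of the commit: the host/port and the requests dependency are illustrative assumptions.

import requests  # hypothetical smoke test; assumes the API is served on localhost:8000

resp = requests.get("http://localhost:8000/api/recent-activity", params={"limit": 10})
resp.raise_for_status()
for item in resp.json()["activities"]:
    print(item["type"], item["unit_id"], item["timestamp"])

resp = requests.get("http://localhost:8000/api/recent-callins", params={"hours": 12, "limit": 5})
print(resp.json()["total"], "call-ins in the last 12 hours")

The hunks that follow extend the settings router (prefix /api/settings) with the database management endpoints.
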
@@ -1,14 +1,17 @@
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
from fastapi.responses import StreamingResponse, FileResponse
from sqlalchemy.orm import Session
from datetime import datetime, date, timezone
from pydantic import BaseModel
from typing import Optional
import csv
import io
import json
import shutil
from pathlib import Path

from backend.database import get_db
from backend.models import RosterUnit, Emitter, IgnoredUnit, UserPreferences
from backend.services.database_backup import DatabaseBackupService

router = APIRouter(prefix="/api/settings", tags=["settings"])

@@ -325,3 +328,144 @@ def update_preferences(
        "status_pending_threshold_hours": prefs.status_pending_threshold_hours,
        "updated_at": prefs.updated_at.isoformat() if prefs.updated_at else None
    }


# Database Management Endpoints

backup_service = DatabaseBackupService()

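# A single module-level service instance is shared by all the endpoints below
# (assumed safe to share, since the handlers only call its methods).
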
@router.get("/database/stats")
def get_database_stats():
    """Get current database statistics"""
    try:
        stats = backup_service.get_database_stats()
        return stats
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get database stats: {str(e)}")


@router.post("/database/snapshot")
def create_database_snapshot(description: Optional[str] = None):
    """Create a full database snapshot"""
    try:
        snapshot = backup_service.create_snapshot(description=description)
        return {
            "message": "Snapshot created successfully",
            "snapshot": snapshot
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Snapshot creation failed: {str(e)}")


@router.get("/database/snapshots")
def list_database_snapshots():
    """List all available database snapshots"""
    try:
        snapshots = backup_service.list_snapshots()
        return {
            "snapshots": snapshots,
            "count": len(snapshots)
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to list snapshots: {str(e)}")


@router.get("/database/snapshot/{filename}")
def download_snapshot(filename: str):
    """Download a specific snapshot file"""
    try:
        snapshot_path = backup_service.download_snapshot(filename)
        return FileResponse(
            path=str(snapshot_path),
            filename=filename,
            media_type="application/x-sqlite3"
        )
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Download failed: {str(e)}")

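# Usage note: the {filename} path parameter is the "filename" field returned
# by GET /database/snapshots above, e.g. (hypothetical name)
#   GET /api/settings/database/snapshot/snapshot_20250101_120000.db
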
@router.delete("/database/snapshot/{filename}")
def delete_database_snapshot(filename: str):
    """Delete a specific snapshot"""
    try:
        backup_service.delete_snapshot(filename)
        return {
            "message": f"Snapshot {filename} deleted successfully",
            "filename": filename
        }
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Delete failed: {str(e)}")

class RestoreRequest(BaseModel):
    """Schema for a restore request"""
    filename: str
    create_backup: bool = True

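# Example request body for POST /database/restore (illustrative filename):
#   {"filename": "snapshot_20250101_120000.db", "create_backup": true}
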
@router.post("/database/restore")
def restore_database(request: RestoreRequest, db: Session = Depends(get_db)):
    """Restore the database from a snapshot"""
    try:
        # Close this request's database session before overwriting the file
        db.close()

        result = backup_service.restore_snapshot(
            filename=request.filename,
            create_backup_before_restore=request.create_backup
        )

        return result
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Snapshot {request.filename} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Restore failed: {str(e)}")

@router.post("/database/upload-snapshot")
async def upload_snapshot(file: UploadFile = File(...)):
    """Upload a snapshot file to the backups directory"""
    if not file.filename or not file.filename.endswith('.db'):
        raise HTTPException(status_code=400, detail="File must be a .db file")

    try:
        # Save the uploaded file to the backups directory
        backups_dir = Path("./data/backups")
        backups_dir.mkdir(parents=True, exist_ok=True)

        # Timestamp once, timezone-aware UTC, so name and metadata agree
        now = datetime.now(timezone.utc)
        timestamp = now.strftime("%Y%m%d_%H%M%S")
        uploaded_filename = f"snapshot_uploaded_{timestamp}.db"
        file_path = backups_dir / uploaded_filename

        # Save file
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)

        # Write a metadata sidecar so the upload shows up in snapshot listings
        metadata = {
            "filename": uploaded_filename,
            "created_at": timestamp,
            "created_at_iso": now.isoformat(),
            "description": f"Uploaded: {file.filename}",
            "size_bytes": file_path.stat().st_size,
            "size_mb": round(file_path.stat().st_size / (1024 * 1024), 2),
            "type": "uploaded"
        }

        metadata_path = backups_dir / f"{uploaded_filename}.meta.json"
        with open(metadata_path, 'w') as f:
            json.dump(metadata, f, indent=2)

        return {
            "message": "Snapshot uploaded successfully",
            "snapshot": metadata
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
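
DatabaseBackupService itself lives in backend/services/database_backup.py and is not part of this diff. A minimal sketch of the interface these endpoints assume, with method names and signatures inferred from the calls above (the real implementation may differ):

from pathlib import Path
from typing import Optional

class DatabaseBackupService:
    """Inferred interface only; see backend/services/database_backup.py."""

    def get_database_stats(self) -> dict: ...
    def create_snapshot(self, description: Optional[str] = None) -> dict: ...
    def list_snapshots(self) -> list: ...
    def download_snapshot(self, filename: str) -> Path: ...
    def delete_snapshot(self, filename: str) -> None: ...
    def restore_snapshot(self, filename: str, create_backup_before_restore: bool = True) -> dict: ...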