db management system added

This commit is contained in:
serversdwn
2025-12-16 20:02:04 +00:00
parent d97999e26f
commit 27f8719e33
9 changed files with 1705 additions and 16 deletions

@@ -9,7 +9,7 @@ from typing import List, Dict
from pydantic import BaseModel
from backend.database import engine, Base, get_db
from backend.routers import roster, units, photos, roster_edit, dashboard, dashboard_tabs
from backend.routers import roster, units, photos, roster_edit, dashboard, dashboard_tabs, activity
from backend.services.snapshot import emit_status_snapshot
from backend.models import IgnoredUnit
@@ -67,6 +67,7 @@ app.include_router(photos.router)
app.include_router(roster_edit.router)
app.include_router(dashboard.router)
app.include_router(dashboard_tabs.router)
app.include_router(activity.router)
from backend.routers import settings
app.include_router(settings.router)

backend/routers/activity.py Normal file

@@ -0,0 +1,146 @@
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from sqlalchemy import desc
from pathlib import Path
from datetime import datetime, timedelta, timezone
from typing import List, Dict, Any, Optional
from backend.database import get_db
from backend.models import UnitHistory, Emitter, RosterUnit
router = APIRouter(prefix="/api", tags=["activity"])
PHOTOS_BASE_DIR = Path("data/photos")
@router.get("/recent-activity")
def get_recent_activity(limit: int = 20, db: Session = Depends(get_db)):
"""
Get recent activity feed combining unit history changes and photo uploads.
Returns a unified timeline of events sorted by timestamp (newest first).
"""
activities = []
# Get recent history entries
history_entries = db.query(UnitHistory)\
.order_by(desc(UnitHistory.changed_at))\
.limit(limit * 2)\
.all() # Get more than needed to mix with photos
for entry in history_entries:
activity = {
"type": "history",
"timestamp": entry.changed_at.isoformat(),
"timestamp_unix": entry.changed_at.timestamp(),
"unit_id": entry.unit_id,
"change_type": entry.change_type,
"field_name": entry.field_name,
"old_value": entry.old_value,
"new_value": entry.new_value,
"source": entry.source,
"notes": entry.notes
}
activities.append(activity)
# Get recent photos
if PHOTOS_BASE_DIR.exists():
image_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp"}
photo_activities = []
for unit_dir in PHOTOS_BASE_DIR.iterdir():
if not unit_dir.is_dir():
continue
unit_id = unit_dir.name
for file_path in unit_dir.iterdir():
if file_path.is_file() and file_path.suffix.lower() in image_extensions:
modified_time = file_path.stat().st_mtime
photo_activities.append({
"type": "photo",
"timestamp": datetime.fromtimestamp(modified_time).isoformat(),
"timestamp_unix": modified_time,
"unit_id": unit_id,
"filename": file_path.name,
"photo_url": f"/api/unit/{unit_id}/photo/{file_path.name}"
})
activities.extend(photo_activities)
# Sort all activities by timestamp (newest first)
activities.sort(key=lambda x: x["timestamp_unix"], reverse=True)
# Limit to requested number
activities = activities[:limit]
return {
"activities": activities,
"total": len(activities)
}
@router.get("/recent-callins")
def get_recent_callins(hours: int = 6, limit: Optional[int] = None, db: Session = Depends(get_db)):
"""
Get recent unit call-ins (units that have reported recently).
Returns units sorted by most recent last_seen timestamp.
Args:
hours: Look back this many hours (default: 6)
limit: Maximum number of results (default: None = all)
"""
# Calculate the time threshold
time_threshold = datetime.now(timezone.utc) - timedelta(hours=hours)
# Query emitters with recent activity, joined with roster info
recent_emitters = db.query(Emitter)\
.filter(Emitter.last_seen >= time_threshold)\
.order_by(desc(Emitter.last_seen))\
.all()
# Get roster info for all units
roster_dict = {r.id: r for r in db.query(RosterUnit).all()}
call_ins = []
for emitter in recent_emitters:
roster_unit = roster_dict.get(emitter.id)
# Calculate time since last seen
last_seen_utc = emitter.last_seen.replace(tzinfo=timezone.utc) if emitter.last_seen.tzinfo is None else emitter.last_seen
time_diff = datetime.now(timezone.utc) - last_seen_utc
# Format time ago
if time_diff.total_seconds() < 60:
time_ago = "just now"
elif time_diff.total_seconds() < 3600:
minutes = int(time_diff.total_seconds() / 60)
time_ago = f"{minutes}m ago"
else:
hours_ago = time_diff.total_seconds() / 3600
if hours_ago < 24:
time_ago = f"{int(hours_ago)}h {int((hours_ago % 1) * 60)}m ago"
else:
days = int(hours_ago / 24)
time_ago = f"{days}d ago"
call_in = {
"unit_id": emitter.id,
"last_seen": emitter.last_seen.isoformat(),
"time_ago": time_ago,
"status": emitter.status,
"device_type": roster_unit.device_type if roster_unit else "seismograph",
"deployed": roster_unit.deployed if roster_unit else False,
"note": roster_unit.note if roster_unit and roster_unit.note else "",
"location": roster_unit.address if roster_unit and roster_unit.address else (roster_unit.location if roster_unit else "")
}
call_ins.append(call_in)
# Apply limit if specified
if limit:
call_ins = call_ins[:limit]
return {
"call_ins": call_ins,
"total": len(call_ins),
"hours": hours,
"time_threshold": time_threshold.isoformat()
}

@@ -1,14 +1,17 @@
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
from fastapi.responses import StreamingResponse
from fastapi.responses import StreamingResponse, FileResponse
from sqlalchemy.orm import Session
from datetime import datetime, date
from pydantic import BaseModel
from typing import Optional
import csv
import io
import shutil
from pathlib import Path
from backend.database import get_db
from backend.models import RosterUnit, Emitter, IgnoredUnit, UserPreferences
from backend.services.database_backup import DatabaseBackupService
router = APIRouter(prefix="/api/settings", tags=["settings"])
@@ -325,3 +328,144 @@ def update_preferences(
"status_pending_threshold_hours": prefs.status_pending_threshold_hours,
"updated_at": prefs.updated_at.isoformat() if prefs.updated_at else None
}
# Database Management Endpoints
backup_service = DatabaseBackupService()
@router.get("/database/stats")
def get_database_stats():
"""Get current database statistics"""
try:
stats = backup_service.get_database_stats()
return stats
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to get database stats: {str(e)}")
@router.post("/database/snapshot")
def create_database_snapshot(description: Optional[str] = None):
"""Create a full database snapshot"""
try:
snapshot = backup_service.create_snapshot(description=description)
return {
"message": "Snapshot created successfully",
"snapshot": snapshot
}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Snapshot creation failed: {str(e)}")
@router.get("/database/snapshots")
def list_database_snapshots():
"""List all available database snapshots"""
try:
snapshots = backup_service.list_snapshots()
return {
"snapshots": snapshots,
"count": len(snapshots)
}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to list snapshots: {str(e)}")
@router.get("/database/snapshot/{filename}")
def download_snapshot(filename: str):
"""Download a specific snapshot file"""
try:
snapshot_path = backup_service.download_snapshot(filename)
return FileResponse(
path=str(snapshot_path),
filename=filename,
media_type="application/x-sqlite3"
)
except FileNotFoundError:
raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
except Exception as e:
raise HTTPException(status_code=500, detail=f"Download failed: {str(e)}")
@router.delete("/database/snapshot/{filename}")
def delete_database_snapshot(filename: str):
"""Delete a specific snapshot"""
try:
backup_service.delete_snapshot(filename)
return {
"message": f"Snapshot {filename} deleted successfully",
"filename": filename
}
except FileNotFoundError:
raise HTTPException(status_code=404, detail=f"Snapshot {filename} not found")
except Exception as e:
raise HTTPException(status_code=500, detail=f"Delete failed: {str(e)}")
class RestoreRequest(BaseModel):
"""Schema for restore request"""
filename: str
create_backup: bool = True
@router.post("/database/restore")
def restore_database(request: RestoreRequest, db: Session = Depends(get_db)):
"""Restore database from a snapshot"""
try:
# Close the database connection before restoring
db.close()
result = backup_service.restore_snapshot(
filename=request.filename,
create_backup_before_restore=request.create_backup
)
return result
except FileNotFoundError:
raise HTTPException(status_code=404, detail=f"Snapshot {request.filename} not found")
except Exception as e:
raise HTTPException(status_code=500, detail=f"Restore failed: {str(e)}")
@router.post("/database/upload-snapshot")
async def upload_snapshot(file: UploadFile = File(...)):
"""Upload a snapshot file to the backups directory"""
if not file.filename.endswith('.db'):
raise HTTPException(status_code=400, detail="File must be a .db file")
try:
# Save uploaded file to backups directory
backups_dir = Path("./data/backups")
backups_dir.mkdir(parents=True, exist_ok=True)
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
uploaded_filename = f"snapshot_uploaded_{timestamp}.db"
file_path = backups_dir / uploaded_filename
# Save file
with open(file_path, "wb") as buffer:
shutil.copyfileobj(file.file, buffer)
# Create metadata
metadata = {
"filename": uploaded_filename,
"created_at": timestamp,
"created_at_iso": datetime.utcnow().isoformat(),
"description": f"Uploaded: {file.filename}",
"size_bytes": file_path.stat().st_size,
"size_mb": round(file_path.stat().st_size / (1024 * 1024), 2),
"type": "uploaded"
}
metadata_path = backups_dir / f"{uploaded_filename}.meta.json"
import json
with open(metadata_path, 'w') as f:
json.dump(metadata, f, indent=2)
return {
"message": "Snapshot uploaded successfully",
"snapshot": metadata
}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")

@@ -0,0 +1,145 @@
"""
Automatic Database Backup Scheduler
Handles scheduled automatic backups of the database
"""
import schedule
import time
import threading
from datetime import datetime
from typing import Optional
import logging
from backend.services.database_backup import DatabaseBackupService
logger = logging.getLogger(__name__)
class BackupScheduler:
"""Manages automatic database backups on a schedule"""
def __init__(self, db_path: str = "./data/seismo_fleet.db", backups_dir: str = "./data/backups"):
self.backup_service = DatabaseBackupService(db_path=db_path, backups_dir=backups_dir)
self.scheduler_thread: Optional[threading.Thread] = None
self.is_running = False
# Default settings
self.backup_interval_hours = 24 # Daily backups
self.keep_count = 10 # Keep last 10 backups
self.enabled = False
def configure(self, interval_hours: int = 24, keep_count: int = 10, enabled: bool = True):
"""
Configure backup scheduler settings
Args:
interval_hours: Hours between automatic backups
keep_count: Number of backups to retain
enabled: Whether automatic backups are enabled
"""
self.backup_interval_hours = interval_hours
self.keep_count = keep_count
self.enabled = enabled
logger.info(f"Backup scheduler configured: interval={interval_hours}h, keep={keep_count}, enabled={enabled}")
def create_automatic_backup(self):
"""Create an automatic backup and cleanup old ones"""
if not self.enabled:
logger.info("Automatic backups are disabled, skipping")
return
try:
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M UTC")
description = f"Automatic backup - {timestamp}"
logger.info("Creating automatic backup...")
snapshot = self.backup_service.create_snapshot(description=description)
logger.info(f"Automatic backup created: {snapshot['filename']} ({snapshot['size_mb']} MB)")
# Cleanup old backups
cleanup_result = self.backup_service.cleanup_old_snapshots(keep_count=self.keep_count)
if cleanup_result['deleted'] > 0:
logger.info(f"Cleaned up {cleanup_result['deleted']} old snapshots")
return snapshot
except Exception as e:
logger.error(f"Automatic backup failed: {str(e)}")
return None
def start(self):
"""Start the backup scheduler in a background thread"""
if self.is_running:
logger.warning("Backup scheduler is already running")
return
if not self.enabled:
logger.info("Backup scheduler is disabled, not starting")
return
logger.info(f"Starting backup scheduler (every {self.backup_interval_hours} hours)")
# Clear any existing scheduled jobs
schedule.clear()
# Schedule the backup job
schedule.every(self.backup_interval_hours).hours.do(self.create_automatic_backup)
# Also run immediately on startup
self.create_automatic_backup()
# Start the scheduler thread
self.is_running = True
self.scheduler_thread = threading.Thread(target=self._run_scheduler, daemon=True)
self.scheduler_thread.start()
logger.info("Backup scheduler started successfully")
def _run_scheduler(self):
"""Internal method to run the scheduler loop"""
while self.is_running:
schedule.run_pending()
time.sleep(60) # Check every minute
def stop(self):
"""Stop the backup scheduler"""
if not self.is_running:
logger.warning("Backup scheduler is not running")
return
logger.info("Stopping backup scheduler...")
self.is_running = False
schedule.clear()
if self.scheduler_thread:
self.scheduler_thread.join(timeout=5)
logger.info("Backup scheduler stopped")
def get_status(self) -> dict:
"""Get current scheduler status"""
next_run = None
if self.is_running and schedule.jobs:
next_run = schedule.jobs[0].next_run.isoformat() if schedule.jobs[0].next_run else None
return {
"enabled": self.enabled,
"running": self.is_running,
"interval_hours": self.backup_interval_hours,
"keep_count": self.keep_count,
"next_run": next_run
}
# Global scheduler instance
_scheduler_instance: Optional[BackupScheduler] = None
def get_backup_scheduler() -> BackupScheduler:
"""Get or create the global backup scheduler instance"""
global _scheduler_instance
if _scheduler_instance is None:
_scheduler_instance = BackupScheduler()
return _scheduler_instance

@@ -0,0 +1,192 @@
"""
Database Backup and Restore Service
Handles full database snapshots, restoration, and remote synchronization
"""
import os
import shutil
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Optional
import json
class DatabaseBackupService:
"""Manages database backup operations"""
def __init__(self, db_path: str = "./data/seismo_fleet.db", backups_dir: str = "./data/backups"):
self.db_path = Path(db_path)
self.backups_dir = Path(backups_dir)
self.backups_dir.mkdir(parents=True, exist_ok=True)
def create_snapshot(self, description: Optional[str] = None) -> Dict:
"""
Create a full database snapshot using SQLite backup API
Returns snapshot metadata
"""
if not self.db_path.exists():
raise FileNotFoundError(f"Database not found at {self.db_path}")
# Generate snapshot filename with timestamp
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
snapshot_name = f"snapshot_{timestamp}.db"
snapshot_path = self.backups_dir / snapshot_name
# Get database size before backup
db_size = self.db_path.stat().st_size
try:
# Use SQLite backup API for safe backup (handles concurrent access)
source_conn = sqlite3.connect(str(self.db_path))
dest_conn = sqlite3.connect(str(snapshot_path))
# Perform the backup
with dest_conn:
source_conn.backup(dest_conn)
source_conn.close()
dest_conn.close()
# Create metadata
metadata = {
"filename": snapshot_name,
"created_at": timestamp,
"created_at_iso": datetime.utcnow().isoformat(),
"description": description or "Manual snapshot",
"size_bytes": snapshot_path.stat().st_size,
"size_mb": round(snapshot_path.stat().st_size / (1024 * 1024), 2),
"original_db_size_bytes": db_size,
"type": "manual"
}
# Save metadata as JSON sidecar file
metadata_path = self.backups_dir / f"{snapshot_name}.meta.json"
with open(metadata_path, 'w') as f:
json.dump(metadata, f, indent=2)
return metadata
except Exception as e:
# Clean up partial snapshot if it exists
if snapshot_path.exists():
snapshot_path.unlink()
raise Exception(f"Snapshot creation failed: {str(e)}")
def list_snapshots(self) -> List[Dict]:
"""
List all available snapshots with metadata
Returns list sorted by creation date (newest first)
"""
snapshots = []
for db_file in sorted(self.backups_dir.glob("snapshot_*.db"), reverse=True):
metadata_file = self.backups_dir / f"{db_file.name}.meta.json"
if metadata_file.exists():
with open(metadata_file, 'r') as f:
metadata = json.load(f)
else:
# Fallback for legacy snapshots without metadata
stat_info = db_file.stat()
metadata = {
"filename": db_file.name,
"created_at": datetime.fromtimestamp(stat_info.st_mtime).strftime("%Y%m%d_%H%M%S"),
"created_at_iso": datetime.fromtimestamp(stat_info.st_mtime).isoformat(),
"description": "Legacy snapshot",
"size_bytes": stat_info.st_size,
"size_mb": round(stat_info.st_size / (1024 * 1024), 2),
"type": "manual"
}
snapshots.append(metadata)
return snapshots
def delete_snapshot(self, filename: str) -> bool:
"""Delete a snapshot and its metadata"""
snapshot_path = self.backups_dir / filename
metadata_path = self.backups_dir / f"{filename}.meta.json"
if not snapshot_path.exists():
raise FileNotFoundError(f"Snapshot {filename} not found")
snapshot_path.unlink()
if metadata_path.exists():
metadata_path.unlink()
return True
def restore_snapshot(self, filename: str, create_backup_before_restore: bool = True) -> Dict:
"""
Restore database from a snapshot
Creates a safety backup before restoring if requested
"""
snapshot_path = self.backups_dir / filename
if not snapshot_path.exists():
raise FileNotFoundError(f"Snapshot {filename} not found")
if not self.db_path.exists():
raise FileNotFoundError(f"Database not found at {self.db_path}")
backup_info = None
# Create safety backup before restore
if create_backup_before_restore:
backup_info = self.create_snapshot(description="Auto-backup before restore")
try:
# Replace database file
shutil.copy2(str(snapshot_path), str(self.db_path))
return {
"message": "Database restored successfully",
"restored_from": filename,
"restored_at": datetime.utcnow().isoformat(),
"backup_created": backup_info["filename"] if backup_info else None
}
except Exception as e:
raise Exception(f"Restore failed: {str(e)}")
def get_database_stats(self) -> Dict:
"""Get statistics about the current database"""
if not self.db_path.exists():
return {"error": "Database not found"}
conn = sqlite3.connect(str(self.db_path))
cursor = conn.cursor()
# Get table counts
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'")
tables = cursor.fetchall()
table_stats = {}
total_rows = 0
for (table_name,) in tables:
cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
count = cursor.fetchone()[0]
table_stats[table_name] = count
total_rows += count
conn.close()
db_size = self.db_path.stat().st_size
return {
"database_path": str(self.db_path),
"size_bytes": db_size,
"size_mb": round(db_size / (1024 * 1024), 2),
"total_rows": total_rows,
"tables": table_stats,
"last_modified": datetime.fromtimestamp(self.db_path.stat().st_mtime).isoformat()
}
def download_snapshot(self, filename: str) -> Path:
"""Get the file path for downloading a snapshot"""
snapshot_path = self.backups_dir / filename
if not snapshot_path.exists():
raise FileNotFoundError(f"Snapshot {filename} not found")
return snapshot_path

docs/DATABASE_MANAGEMENT.md Normal file

@@ -0,0 +1,477 @@
# Database Management Guide
This guide covers the comprehensive database management features available in the Seismo Fleet Manager, including manual snapshots, restoration, remote cloning, and automatic backups.
## Table of Contents
1. [Manual Database Snapshots](#manual-database-snapshots)
2. [Restore from Snapshot](#restore-from-snapshot)
3. [Download and Upload Snapshots](#download-and-upload-snapshots)
4. [Clone Database to Dev Server](#clone-database-to-dev-server)
5. [Automatic Backup Service](#automatic-backup-service)
6. [API Reference](#api-reference)
---
## Manual Database Snapshots
### Creating a Snapshot via UI
1. Navigate to the **Settings** → **Danger Zone** tab
2. Scroll to the **Database Management** section
3. Click **"Create Snapshot"**
4. Optionally enter a description
5. The snapshot will be created and appear in the "Available Snapshots" list
### Creating a Snapshot via API
```bash
curl -X POST http://localhost:8000/api/settings/database/snapshot \
-H "Content-Type: application/json" \
-d '{"description": "Pre-deployment backup"}'
```
### What Happens
- A full copy of the SQLite database is created using the SQLite backup API
- The snapshot is stored in `./data/backups/` directory
- A metadata JSON file is created alongside the snapshot
- No downtime or interruption to the running application
### Snapshot Files
Snapshots are stored as:
- **Database file**: `snapshot_YYYYMMDD_HHMMSS.db`
- **Metadata file**: `snapshot_YYYYMMDD_HHMMSS.db.meta.json`
Example:
```
data/backups/
├── snapshot_20250101_143022.db
├── snapshot_20250101_143022.db.meta.json
├── snapshot_20250102_080000.db
└── snapshot_20250102_080000.db.meta.json
```
---
## Restore from Snapshot
### Restoring via UI
1. Navigate to the **Settings** → **Danger Zone** tab
2. In the **Available Snapshots** section, find the snapshot you want to restore
3. Click the **restore icon** (circular arrow) next to the snapshot
4. Confirm the restoration warning
5. A safety backup of the current database is automatically created
6. The database is replaced with the snapshot
7. The page reloads automatically
### Restoring via API
```bash
curl -X POST http://localhost:8000/api/settings/database/restore \
-H "Content-Type: application/json" \
-d '{
"filename": "snapshot_20250101_143022.db",
"create_backup": true
}'
```
### Important Notes
- **Always creates a safety backup** before restoring (unless explicitly disabled)
- **Application reload required** - Users should refresh their browsers
- **File-level replacement** - The entire database file is overwritten with the snapshot in a single copy operation
- **Cannot be undone** - But you'll have the safety backup
---
## Download and Upload Snapshots
### Download a Snapshot
**Via UI**: Click the download icon next to any snapshot in the list
**Via Browser**:
```
http://localhost:8000/api/settings/database/snapshot/snapshot_20250101_143022.db
```
**Via Command Line**:
```bash
curl -o backup.db http://localhost:8000/api/settings/database/snapshot/snapshot_20250101_143022.db
```
### Upload a Snapshot
**Via UI**:
2. Navigate to the **Settings** → **Danger Zone** tab
2. Find the **Upload Snapshot** section
3. Click **"Choose File"** and select a `.db` file
4. Click **"Upload Snapshot"**
**Via Command Line**:
```bash
curl -X POST http://localhost:8000/api/settings/database/upload-snapshot \
-F "file=@/path/to/your/backup.db"
```
---
## Clone Database to Dev Server
The clone tool allows you to copy the production database to a remote development server over the network.
### Prerequisites
- Remote dev server must have the same Seismo Fleet Manager installation
- Network connectivity between production and dev servers
- Python 3 and `requests` library installed
### Basic Usage
```bash
# Clone current database to dev server
python3 scripts/clone_db_to_dev.py --url https://dev.example.com
# Clone using existing snapshot
python3 scripts/clone_db_to_dev.py \
--url https://dev.example.com \
--snapshot snapshot_20250101_143022.db
# Clone with authentication token
python3 scripts/clone_db_to_dev.py \
--url https://dev.example.com \
--token YOUR_AUTH_TOKEN
```
### What Happens
1. Creates a snapshot of the production database (or uses existing one)
2. Uploads the snapshot to the remote dev server
3. Automatically restores the snapshot on the dev server
4. Creates a safety backup on the dev server before restoring
### Remote Server Setup
The remote dev server needs no special setup - it just needs to be running the same Seismo Fleet Manager application with the database management endpoints enabled.
### Use Cases
- **Testing**: Test changes against production data in a dev environment
- **Debugging**: Investigate production issues with real data safely
- **Training**: Provide realistic data for user training
- **Development**: Build new features with realistic data
---
## Automatic Backup Service
The automatic backup service runs scheduled backups in the background and manages backup retention.
### Configuration
The backup scheduler is configured programmatically; the environment variables below are a suggested convention and still need to be wired into `scheduler.configure()` at startup (a sketch follows the variable list).
**Programmatic Configuration**:
```python
from backend.services.backup_scheduler import get_backup_scheduler
scheduler = get_backup_scheduler()
scheduler.configure(
interval_hours=24, # Backup every 24 hours
keep_count=10, # Keep last 10 backups
enabled=True # Enable automatic backups
)
scheduler.start()
```
**Environment Variables** (add to your `.env` or deployment config):
```bash
AUTO_BACKUP_ENABLED=true
AUTO_BACKUP_INTERVAL_HOURS=24
AUTO_BACKUP_KEEP_COUNT=10
```
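
The committed code does not read these variables itself. A minimal sketch of wiring them into the scheduler at startup, assuming the variable names above and the `os.getenv` defaults shown, could look like this:

```python
import os

from backend.services.backup_scheduler import get_backup_scheduler


def configure_scheduler_from_env() -> None:
    """Apply AUTO_BACKUP_* environment variables to the global scheduler (sketch)."""
    scheduler = get_backup_scheduler()
    scheduler.configure(
        interval_hours=int(os.getenv("AUTO_BACKUP_INTERVAL_HOURS", "24")),
        keep_count=int(os.getenv("AUTO_BACKUP_KEEP_COUNT", "10")),
        enabled=os.getenv("AUTO_BACKUP_ENABLED", "false").lower() == "true",
    )
    scheduler.start()  # start() returns early unless enabled=True
```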
### Integration with Application Startup
Add to `backend/main.py`:
```python
from backend.services.backup_scheduler import get_backup_scheduler
@app.on_event("startup")
async def startup_event():
# Start automatic backup scheduler
scheduler = get_backup_scheduler()
scheduler.configure(
interval_hours=24, # Daily backups
keep_count=10, # Keep 10 most recent
enabled=True
)
scheduler.start()
@app.on_event("shutdown")
async def shutdown_event():
# Stop backup scheduler gracefully
scheduler = get_backup_scheduler()
scheduler.stop()
```
### Manual Control
```python
from backend.services.backup_scheduler import get_backup_scheduler
scheduler = get_backup_scheduler()
# Get current status
status = scheduler.get_status()
print(status)
# {'enabled': True, 'running': True, 'interval_hours': 24, 'keep_count': 10, 'next_run': '2025-01-02T14:00:00'}
# Create backup immediately
scheduler.create_automatic_backup()
# Stop scheduler
scheduler.stop()
# Start scheduler
scheduler.start()
```
### Backup Retention
After each automatic backup, the scheduler prunes old snapshots based on the `keep_count` setting: with `keep_count=10`, only the 10 most recent snapshots are kept and older ones are deleted.
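
Retention is handled by `DatabaseBackupService.cleanup_old_snapshots(keep_count=...)`, which the scheduler calls after each automatic backup. Its body is not shown in the excerpt above, but conceptually it keeps the newest `keep_count` snapshots and removes the rest, roughly like this illustrative sketch:

```python
def cleanup_old_snapshots(self, keep_count: int = 10) -> dict:
    """Keep the newest `keep_count` snapshots and delete the rest (sketch only)."""
    snapshots = self.list_snapshots()        # list_snapshots() returns newest first
    deleted = 0
    for snapshot in snapshots[keep_count:]:  # everything beyond the newest N
        self.delete_snapshot(snapshot["filename"])
        deleted += 1
    return {"deleted": deleted, "kept": min(len(snapshots), keep_count)}
```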
---
## API Reference
### Database Statistics
```http
GET /api/settings/database/stats
```
Returns database size, row counts, and last modified time.
**Response**:
```json
{
"database_path": "./data/seismo_fleet.db",
"size_bytes": 1048576,
"size_mb": 1.0,
"total_rows": 1250,
"tables": {
"roster": 450,
"emitters": 600,
"ignored_units": 50,
"unit_history": 150
},
"last_modified": "2025-01-01T14:30:22"
}
```
### Create Snapshot
```http
POST /api/settings/database/snapshot
Content-Type: application/json
{
"description": "Optional description"
}
```
**Response**:
```json
{
"message": "Snapshot created successfully",
"snapshot": {
"filename": "snapshot_20250101_143022.db",
"created_at": "20250101_143022",
"created_at_iso": "2025-01-01T14:30:22",
"description": "Optional description",
"size_bytes": 1048576,
"size_mb": 1.0,
"type": "manual"
}
}
```
### List Snapshots
```http
GET /api/settings/database/snapshots
```
**Response**:
```json
{
"snapshots": [
{
"filename": "snapshot_20250101_143022.db",
"created_at": "20250101_143022",
"created_at_iso": "2025-01-01T14:30:22",
"description": "Manual backup",
"size_mb": 1.0,
"type": "manual"
}
],
"count": 1
}
```
### Download Snapshot
```http
GET /api/settings/database/snapshot/{filename}
```
Returns the snapshot file as a download.
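
For scripted use, the same endpoint works with `requests`; a small example follows (the host and snapshot filename are placeholders):

```python
import requests

BASE_URL = "http://localhost:8000"          # placeholder host
filename = "snapshot_20250101_143022.db"    # placeholder snapshot name

resp = requests.get(f"{BASE_URL}/api/settings/database/snapshot/{filename}", timeout=60)
resp.raise_for_status()
with open(filename, "wb") as f:
    f.write(resp.content)
print(f"Saved {filename} ({len(resp.content) / (1024 * 1024):.2f} MB)")
```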
### Delete Snapshot
```http
DELETE /api/settings/database/snapshot/{filename}
```
### Restore Database
```http
POST /api/settings/database/restore
Content-Type: application/json
{
"filename": "snapshot_20250101_143022.db",
"create_backup": true
}
```
**Response**:
```json
{
"message": "Database restored successfully",
"restored_from": "snapshot_20250101_143022.db",
"restored_at": "2025-01-01T15:00:00",
"backup_created": "snapshot_20250101_150000.db"
}
```
### Upload Snapshot
```http
POST /api/settings/database/upload-snapshot
Content-Type: multipart/form-data
file: <binary data>
```
---
## Best Practices
### 1. Regular Backups
- **Enable automatic backups** with a 24-hour interval
- **Keep at least 7-10 backups** for historical coverage
- **Create manual snapshots** before major changes
### 2. Before Major Operations
Always create a snapshot before:
- Software upgrades
- Bulk data imports
- Database schema changes
- Testing destructive operations
### 3. Testing Restores
Periodically test your restore process:
1. Download a snapshot
2. Test restoration on a dev environment
3. Verify data integrity (a quick check is sketched below)
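
A lightweight way to sanity-check a downloaded snapshot, assuming a local copy named `backup.db`, is to open it read-only and run SQLite's built-in integrity check:

```python
import sqlite3


def check_snapshot(path: str = "backup.db") -> None:
    """Open a snapshot read-only and run PRAGMA integrity_check (sketch)."""
    conn = sqlite3.connect(f"file:{path}?mode=ro", uri=True)
    try:
        result = conn.execute("PRAGMA integrity_check").fetchone()[0]
        tables = conn.execute(
            "SELECT COUNT(*) FROM sqlite_master WHERE type='table'"
        ).fetchone()[0]
        print(f"integrity_check: {result}, tables: {tables}")
    finally:
        conn.close()
```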
### 4. Off-Site Backups
For production systems:
- **Download snapshots** to external storage regularly
- Use the clone tool to **sync to remote servers**
- Store backups in **multiple geographic locations**
### 5. Snapshot Management
- Delete old snapshots when no longer needed
- Use descriptive names/descriptions for manual snapshots
- Keep pre-deployment snapshots separate
---
## Troubleshooting
### Snapshot Creation Fails
**Problem**: "Database is locked" error
**Solution**: The database is being written to. Wait a moment and try again. The SQLite backup API handles most locking automatically.
### Restore Doesn't Complete
**Problem**: Restore appears to hang
**Solution**:
- Check server logs for errors
- Ensure sufficient disk space
- Verify the snapshot file isn't corrupted
### Upload Fails on Dev Server
**Problem**: "Permission denied" or "File too large"
**Solutions**:
- Check file upload size limits in your web server config (nginx/apache)
- Verify write permissions on `./data/backups/` directory
- Ensure sufficient disk space
### Automatic Backups Not Running
**Problem**: No automatic backups being created
**Solutions**:
1. Check if scheduler is enabled: `scheduler.get_status()`
2. Check application logs for scheduler errors
3. Ensure `schedule` library is installed: `pip install schedule`
4. Verify scheduler was started in application startup
---
## Security Considerations
1. **Access Control**: Restrict access to the Settings → Danger Zone to administrators only
2. **Backup Storage**: Store backups in a secure location with proper permissions
3. **Remote Cloning**: Use authentication tokens when cloning to remote servers
4. **Data Sensitivity**: Remember that snapshots contain all database data - treat them with the same security as the live database
---
## File Locations
- **Database**: `./data/seismo_fleet.db`
- **Backups Directory**: `./data/backups/`
- **Clone Script**: `./scripts/clone_db_to_dev.py`
- **Backup Service**: `./backend/services/database_backup.py`
- **Scheduler Service**: `./backend/services/backup_scheduler.py`
---
## Support
For issues or questions:
1. Check application logs in `./logs/`
2. Review this documentation
3. Test with a small database first
4. Contact your system administrator

scripts/clone_db_to_dev.py Executable file

@@ -0,0 +1,149 @@
#!/usr/bin/env python3
"""
Clone Production Database to Dev Server
Helper script to clone the production database to a remote development server
"""
import argparse
import requests
from pathlib import Path
import sys
from typing import Optional
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from backend.services.database_backup import DatabaseBackupService
def clone_to_dev(remote_url: str, snapshot_filename: Optional[str] = None, auth_token: Optional[str] = None):
"""Clone database to remote dev server"""
backup_service = DatabaseBackupService()
print(f"🔄 Cloning database to {remote_url}...")
try:
# If no snapshot specified, create a new one
if snapshot_filename:
print(f"📦 Using existing snapshot: {snapshot_filename}")
snapshot_path = backup_service.backups_dir / snapshot_filename
if not snapshot_path.exists():
print(f"❌ Error: Snapshot {snapshot_filename} not found")
return False
else:
print("📸 Creating new snapshot...")
snapshot_info = backup_service.create_snapshot(description="Clone to dev server")
snapshot_filename = snapshot_info["filename"]
snapshot_path = backup_service.backups_dir / snapshot_filename
print(f"✅ Snapshot created: {snapshot_filename} ({snapshot_info['size_mb']} MB)")
# Upload to remote server
print(f"📤 Uploading to {remote_url}...")
headers = {}
if auth_token:
headers["Authorization"] = f"Bearer {auth_token}"
with open(snapshot_path, 'rb') as f:
files = {'file': (snapshot_filename, f, 'application/x-sqlite3')}
response = requests.post(
f"{remote_url.rstrip('/')}/api/settings/database/upload-snapshot",
files=files,
headers=headers,
timeout=300
)
response.raise_for_status()
result = response.json()
print(f"✅ Upload successful!")
print(f" Remote filename: {result['snapshot']['filename']}")
print(f" Size: {result['snapshot']['size_mb']} MB")
# Now restore on remote server
print("🔄 Restoring on remote server...")
restore_response = requests.post(
f"{remote_url.rstrip('/')}/api/settings/database/restore",
json={
"filename": result['snapshot']['filename'],
"create_backup": True
},
headers=headers,
timeout=60
)
restore_response.raise_for_status()
restore_result = restore_response.json()
print(f"✅ Database cloned successfully!")
print(f" Restored from: {restore_result['restored_from']}")
print(f" Remote backup created: {restore_result.get('backup_created', 'N/A')}")
return True
except requests.exceptions.RequestException as e:
print(f"❌ Network error: {str(e)}")
return False
except Exception as e:
print(f"❌ Error: {str(e)}")
return False
def main():
parser = argparse.ArgumentParser(
description="Clone production database to development server",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
# Clone current database to dev server
python clone_db_to_dev.py --url https://dev.example.com
# Clone using existing snapshot
python clone_db_to_dev.py --url https://dev.example.com --snapshot snapshot_20250101_120000.db
# Clone with authentication
python clone_db_to_dev.py --url https://dev.example.com --token YOUR_TOKEN
"""
)
parser.add_argument(
'--url',
required=True,
help='Remote dev server URL (e.g., https://dev.example.com)'
)
parser.add_argument(
'--snapshot',
help='Use existing snapshot instead of creating new one'
)
parser.add_argument(
'--token',
help='Authentication token for remote server'
)
args = parser.parse_args()
print("=" * 60)
print(" Database Cloning Tool - Production to Dev")
print("=" * 60)
print()
success = clone_to_dev(
remote_url=args.url,
snapshot_filename=args.snapshot,
auth_token=args.token
)
print()
if success:
print("🎉 Cloning completed successfully!")
sys.exit(0)
else:
print("💥 Cloning failed")
sys.exit(1)
if __name__ == "__main__":
main()

@@ -116,28 +116,28 @@
</div>
</div>
<!-- Recent Photos Card -->
<div class="rounded-xl shadow-lg bg-white dark:bg-slate-800 p-6" id="recent-photos-card">
<div class="flex items-center justify-between mb-4 cursor-pointer md:cursor-default" onclick="toggleCard('recent-photos')">
<h2 class="text-lg font-semibold text-gray-900 dark:text-white">Recent Photos</h2>
<!-- Recently Called In Units Card -->
<div class="rounded-xl shadow-lg bg-white dark:bg-slate-800 p-6" id="recent-callins-card">
<div class="flex items-center justify-between mb-4 cursor-pointer md:cursor-default" onclick="toggleCard('recent-callins')">
<h2 class="text-lg font-semibold text-gray-900 dark:text-white">Recent Call-Ins</h2>
<div class="flex items-center gap-2">
<svg class="w-6 h-6 text-seismo-burgundy" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z">
d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z">
</path>
</svg>
<svg class="w-5 h-5 text-gray-500 transition-transform md:hidden chevron" id="recent-photos-chevron" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<svg class="w-5 h-5 text-gray-500 transition-transform md:hidden chevron" id="recent-callins-chevron" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 9l-7 7-7-7"></path>
</svg>
</div>
</div>
<div class="text-center text-gray-500 dark:text-gray-400 card-content" id="recent-photos-content">
<svg class="w-16 h-16 mx-auto mb-2 opacity-50" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z">
</path>
</svg>
<p class="text-sm">No recent photos</p>
<div class="card-content" id="recent-callins-content">
<div id="recent-callins-list" class="space-y-2">
<p class="text-sm text-gray-500 dark:text-gray-400">Loading recent call-ins...</p>
</div>
<button id="show-all-callins" class="hidden mt-3 w-full text-center text-sm text-seismo-orange hover:text-seismo-burgundy font-medium">
Show all recent call-ins
</button>
</div>
</div>
@@ -295,7 +295,7 @@ function toggleCard(cardName) {
// Restore card states from localStorage on page load
function restoreCardStates() {
const cardStates = JSON.parse(localStorage.getItem('dashboardCardStates') || '{}');
const cardNames = ['fleet-summary', 'recent-alerts', 'recent-photos', 'fleet-map', 'fleet-status'];
const cardNames = ['fleet-summary', 'recent-alerts', 'recent-callins', 'fleet-map', 'fleet-status'];
cardNames.forEach(cardName => {
const content = document.getElementById(`${cardName}-content`);
@@ -531,6 +531,90 @@ async function loadRecentPhotos() {
// Load recent photos on page load and refresh every 30 seconds
loadRecentPhotos();
setInterval(loadRecentPhotos, 30000);
// Load and display recent call-ins
let showingAllCallins = false;
const DEFAULT_CALLINS_DISPLAY = 5;
async function loadRecentCallins() {
try {
const response = await fetch('/api/recent-callins?hours=6');
if (!response.ok) {
throw new Error('Failed to load recent call-ins');
}
const data = await response.json();
const callinsList = document.getElementById('recent-callins-list');
const showAllButton = document.getElementById('show-all-callins');
if (data.call_ins && data.call_ins.length > 0) {
// Determine how many to show
const displayCount = showingAllCallins ? data.call_ins.length : Math.min(DEFAULT_CALLINS_DISPLAY, data.call_ins.length);
const callinsToDisplay = data.call_ins.slice(0, displayCount);
// Build HTML for call-ins list
let html = '';
callinsToDisplay.forEach(callin => {
// Status color
const statusColor = callin.status === 'OK' ? 'green' : callin.status === 'Pending' ? 'yellow' : 'red';
const statusClass = callin.status === 'OK' ? 'bg-green-500' : callin.status === 'Pending' ? 'bg-yellow-500' : 'bg-red-500';
// Build location/note line
let subtitle = '';
if (callin.location) {
subtitle = callin.location;
} else if (callin.note) {
subtitle = callin.note;
}
html += `
<div class="flex items-center justify-between py-2 border-b border-gray-200 dark:border-gray-700 last:border-0">
<div class="flex items-center space-x-3">
<span class="w-2 h-2 rounded-full ${statusClass}"></span>
<div>
<a href="/unit/${callin.unit_id}" class="font-medium text-gray-900 dark:text-white hover:text-seismo-orange">
${callin.unit_id}
</a>
${subtitle ? `<p class="text-xs text-gray-500 dark:text-gray-400">${subtitle}</p>` : ''}
</div>
</div>
<span class="text-sm text-gray-600 dark:text-gray-400">${callin.time_ago}</span>
</div>`;
});
callinsList.innerHTML = html;
// Show/hide the "Show all" button
if (data.call_ins.length > DEFAULT_CALLINS_DISPLAY) {
showAllButton.classList.remove('hidden');
showAllButton.textContent = showingAllCallins
? `Show fewer (${DEFAULT_CALLINS_DISPLAY})`
: `Show all (${data.call_ins.length})`;
} else {
showAllButton.classList.add('hidden');
}
} else {
callinsList.innerHTML = '<p class="text-sm text-gray-500 dark:text-gray-400">No units have called in within the past 6 hours</p>';
showAllButton.classList.add('hidden');
}
} catch (error) {
console.error('Error loading recent call-ins:', error);
document.getElementById('recent-callins-list').innerHTML = '<p class="text-sm text-red-500">Failed to load recent call-ins</p>';
}
}
// Toggle show all/show fewer
document.addEventListener('DOMContentLoaded', function() {
const showAllButton = document.getElementById('show-all-callins');
showAllButton.addEventListener('click', function() {
showingAllCallins = !showingAllCallins;
loadRecentCallins();
});
});
// Load recent call-ins on page load and refresh every 30 seconds
loadRecentCallins();
setInterval(loadRecentCallins, 30000);
</script>
{% endblock %}

@@ -401,6 +401,99 @@
</button>
</div>
</div>
<!-- DATABASE MANAGEMENT SECTION -->
<div class="mt-8 mb-4">
<h2 class="text-2xl font-bold text-gray-900 dark:text-white">Database Management</h2>
<p class="text-sm text-gray-600 dark:text-gray-400 mt-1">Create snapshots, restore backups, and manage database files</p>
</div>
<!-- Database Statistics -->
<div class="border-2 border-blue-300 dark:border-blue-800 rounded-lg p-6 bg-white dark:bg-slate-800">
<h3 class="font-semibold text-blue-600 dark:text-blue-400 text-lg mb-3">Database Statistics</h3>
<div id="dbStatsLoading" class="text-center py-4">
<div class="inline-block animate-spin rounded-full h-6 w-6 border-b-2 border-blue-600"></div>
</div>
<div id="dbStatsContent" class="hidden">
<div class="grid grid-cols-2 md:grid-cols-4 gap-4 text-sm">
<div>
<p class="text-gray-500 dark:text-gray-400">Database Size</p>
<p id="dbSize" class="text-lg font-semibold text-gray-900 dark:text-white">-</p>
</div>
<div>
<p class="text-gray-500 dark:text-gray-400">Total Rows</p>
<p id="dbRows" class="text-lg font-semibold text-gray-900 dark:text-white">-</p>
</div>
<div>
<p class="text-gray-500 dark:text-gray-400">Last Modified</p>
<p id="dbModified" class="text-lg font-semibold text-gray-900 dark:text-white">-</p>
</div>
<div>
<p class="text-gray-500 dark:text-gray-400">Snapshots</p>
<p id="dbSnapshotCount" class="text-lg font-semibold text-gray-900 dark:text-white">-</p>
</div>
</div>
</div>
<button onclick="loadDatabaseStats()" class="mt-4 px-4 py-2 text-blue-600 hover:bg-blue-50 dark:hover:bg-blue-900/20 rounded-lg transition-colors text-sm">
Refresh Stats
</button>
</div>
<!-- Create Snapshot -->
<div class="border border-green-200 dark:border-green-800 rounded-lg p-6 bg-white dark:bg-slate-800">
<div class="flex justify-between items-start">
<div class="flex-1">
<h3 class="font-semibold text-green-600 dark:text-green-400">Create Database Snapshot</h3>
<p class="text-sm text-gray-600 dark:text-gray-400 mt-1">
Create a full backup of the current database state
</p>
</div>
<button onclick="createSnapshot()" class="px-4 py-2 bg-green-600 hover:bg-green-700 text-white rounded-lg transition-colors whitespace-nowrap">
Create Snapshot
</button>
</div>
</div>
<!-- Snapshots List -->
<div class="border border-gray-200 dark:border-gray-700 rounded-lg p-6 bg-white dark:bg-slate-800">
<div class="flex justify-between items-center mb-4">
<h3 class="font-semibold text-gray-900 dark:text-white">Available Snapshots</h3>
<button onclick="loadSnapshots()" class="px-3 py-1 text-sm text-seismo-orange hover:bg-orange-50 dark:hover:bg-orange-900/20 rounded transition-colors">
Refresh
</button>
</div>
<div id="snapshotsLoading" class="text-center py-4">
<div class="inline-block animate-spin rounded-full h-6 w-6 border-b-2 border-seismo-orange"></div>
</div>
<div id="snapshotsList" class="hidden space-y-2">
<!-- Snapshots will be inserted here -->
</div>
<div id="snapshotsEmpty" class="hidden text-center py-8 text-gray-500 dark:text-gray-400">
<svg class="mx-auto h-12 w-12 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 8h14M5 8a2 2 0 110-4h14a2 2 0 110 4M5 8v10a2 2 0 002 2h10a2 2 0 002-2V8m-9 4h4"></path>
</svg>
<p class="mt-2">No snapshots found</p>
<p class="text-sm">Create your first snapshot above</p>
</div>
</div>
<!-- Upload Snapshot -->
<div class="border border-purple-200 dark:border-purple-800 rounded-lg p-6 bg-white dark:bg-slate-800">
<h3 class="font-semibold text-purple-600 dark:text-purple-400 mb-2">Upload Snapshot</h3>
<p class="text-sm text-gray-600 dark:text-gray-400 mb-4">
Upload a database snapshot file from another server
</p>
<form id="uploadSnapshotForm" class="space-y-3">
<input type="file" accept=".db" id="snapshotFileInput" class="block w-full text-sm text-gray-900 dark:text-white border border-gray-300 dark:border-gray-600 rounded-lg cursor-pointer bg-gray-50 dark:bg-slate-700">
<button type="submit" class="px-4 py-2 bg-purple-600 hover:bg-purple-700 text-white rounded-lg transition-colors">
Upload Snapshot
</button>
</form>
<div id="uploadResult" class="hidden mt-3"></div>
</div>
</div>
</div>
@@ -1004,5 +1097,263 @@ async function confirmClearIgnored() {
alert('❌ Error: ' + error.message);
}
}
// ========== DATABASE MANAGEMENT ==========
async function loadDatabaseStats() {
const loading = document.getElementById('dbStatsLoading');
const content = document.getElementById('dbStatsContent');
try {
loading.classList.remove('hidden');
content.classList.add('hidden');
const response = await fetch('/api/settings/database/stats');
const stats = await response.json();
// Update stats display
document.getElementById('dbSize').textContent = stats.size_mb + ' MB';
document.getElementById('dbRows').textContent = stats.total_rows.toLocaleString();
const lastMod = new Date(stats.last_modified);
document.getElementById('dbModified').textContent = lastMod.toLocaleDateString();
// Load snapshot count
const snapshotsResp = await fetch('/api/settings/database/snapshots');
const snapshotsData = await snapshotsResp.json();
document.getElementById('dbSnapshotCount').textContent = snapshotsData.count;
loading.classList.add('hidden');
content.classList.remove('hidden');
} catch (error) {
loading.classList.add('hidden');
alert('Error loading database stats: ' + error.message);
}
}
async function createSnapshot() {
const description = prompt('Enter a description for this snapshot (optional):');
try {
const response = await fetch('/api/settings/database/snapshot', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ description: description || null })
});
const result = await response.json();
if (response.ok) {
alert(`✅ Snapshot created successfully!\n\nFilename: ${result.snapshot.filename}\nSize: ${result.snapshot.size_mb} MB`);
loadSnapshots();
loadDatabaseStats();
} else {
alert('❌ Error: ' + (result.detail || 'Unknown error'));
}
} catch (error) {
alert('❌ Error: ' + error.message);
}
}
async function loadSnapshots() {
const loading = document.getElementById('snapshotsLoading');
const list = document.getElementById('snapshotsList');
const empty = document.getElementById('snapshotsEmpty');
try {
loading.classList.remove('hidden');
list.classList.add('hidden');
empty.classList.add('hidden');
const response = await fetch('/api/settings/database/snapshots');
const data = await response.json();
if (data.snapshots.length === 0) {
loading.classList.add('hidden');
empty.classList.remove('hidden');
return;
}
list.innerHTML = data.snapshots.map(snapshot => createSnapshotCard(snapshot)).join('');
loading.classList.add('hidden');
list.classList.remove('hidden');
} catch (error) {
loading.classList.add('hidden');
alert('Error loading snapshots: ' + error.message);
}
}
function createSnapshotCard(snapshot) {
const createdDate = new Date(snapshot.created_at_iso);
const dateStr = createdDate.toLocaleString();
return `
<div class="border border-gray-200 dark:border-gray-700 rounded-lg p-4 bg-gray-50 dark:bg-gray-700/50">
<div class="flex justify-between items-start">
<div class="flex-1">
<div class="flex items-center gap-2">
<h4 class="font-medium text-gray-900 dark:text-white">${snapshot.filename}</h4>
<span class="text-xs px-2 py-1 rounded ${snapshot.type === 'manual' ? 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-300' : 'bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-300'}">
${snapshot.type}
</span>
</div>
<p class="text-sm text-gray-600 dark:text-gray-400 mt-1">${snapshot.description}</p>
<div class="flex gap-4 mt-2 text-xs text-gray-500 dark:text-gray-400">
<span>📅 ${dateStr}</span>
<span>💾 ${snapshot.size_mb} MB</span>
</div>
</div>
<div class="flex gap-2 ml-4">
<button onclick="downloadSnapshot('${snapshot.filename}')"
class="p-2 hover:bg-gray-200 dark:hover:bg-gray-600 rounded transition-colors text-blue-600 dark:text-blue-400"
title="Download">
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4"></path>
</svg>
</button>
<button onclick="restoreSnapshot('${snapshot.filename}')"
class="p-2 hover:bg-gray-200 dark:hover:bg-gray-600 rounded transition-colors text-green-600 dark:text-green-400"
title="Restore">
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15"></path>
</svg>
</button>
<button onclick="deleteSnapshot('${snapshot.filename}')"
class="p-2 hover:bg-gray-200 dark:hover:bg-gray-600 rounded transition-colors text-red-600 dark:text-red-400"
title="Delete">
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"></path>
</svg>
</button>
</div>
</div>
</div>
`;
}
function downloadSnapshot(filename) {
window.location.href = `/api/settings/database/snapshot/${filename}`;
}
async function restoreSnapshot(filename) {
const confirmMsg = `⚠️ RESTORE DATABASE WARNING ⚠️
This will REPLACE the current database with snapshot:
${filename}
A backup of the current database will be created automatically before restoring.
THIS ACTION WILL RESTART THE APPLICATION!
Continue?`;
if (!confirm(confirmMsg)) {
return;
}
try {
const response = await fetch('/api/settings/database/restore', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
filename: filename,
create_backup: true
})
});
const result = await response.json();
if (response.ok) {
alert(`✅ Database restored successfully!\n\nRestored from: ${result.restored_from}\nBackup created: ${result.backup_created}\n\nThe page will now reload.`);
location.reload();
} else {
alert('❌ Error: ' + (result.detail || 'Unknown error'));
}
} catch (error) {
alert('❌ Error: ' + error.message);
}
}
async function deleteSnapshot(filename) {
if (!confirm(`Delete snapshot ${filename}?\n\nThis cannot be undone.`)) {
return;
}
try {
const response = await fetch(`/api/settings/database/snapshot/${filename}`, {
method: 'DELETE'
});
const result = await response.json();
if (response.ok) {
alert(`✅ Snapshot deleted: ${filename}`);
loadSnapshots();
loadDatabaseStats();
} else {
alert('❌ Error: ' + (result.detail || 'Unknown error'));
}
} catch (error) {
alert('❌ Error: ' + error.message);
}
}
// Upload snapshot form handler
document.getElementById('uploadSnapshotForm').addEventListener('submit', async function(e) {
e.preventDefault();
const fileInput = document.getElementById('snapshotFileInput');
const resultDiv = document.getElementById('uploadResult');
if (!fileInput.files[0]) {
alert('Please select a file');
return;
}
const formData = new FormData();
formData.append('file', fileInput.files[0]);
try {
const response = await fetch('/api/settings/database/upload-snapshot', {
method: 'POST',
body: formData
});
const result = await response.json();
if (response.ok) {
resultDiv.className = 'mt-3 p-3 rounded-lg bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200';
resultDiv.innerHTML = `✅ Uploaded: ${result.snapshot.filename} (${result.snapshot.size_mb} MB)`;
resultDiv.classList.remove('hidden');
fileInput.value = '';
loadSnapshots();
loadDatabaseStats();
setTimeout(() => {
resultDiv.classList.add('hidden');
}, 5000);
} else {
resultDiv.className = 'mt-3 p-3 rounded-lg bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200';
resultDiv.innerHTML = `❌ Error: ${result.detail || 'Unknown error'}`;
resultDiv.classList.remove('hidden');
}
} catch (error) {
resultDiv.className = 'mt-3 p-3 rounded-lg bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200';
resultDiv.innerHTML = `❌ Error: ${error.message}`;
resultDiv.classList.remove('hidden');
}
});
// Load database stats and snapshots when danger zone tab is shown
const originalShowTab = showTab;
showTab = function(tabName) {
originalShowTab(tabName);
if (tabName === 'danger') {
loadDatabaseStats();
loadSnapshots();
}
};
</script>
{% endblock %}