chore: docs/scripts cleaned up
This commit is contained in:
@@ -1,120 +1,20 @@
|
||||
# Helper Scripts
|
||||
# Terra-View Utility Scripts
|
||||
|
||||
This directory contains helper scripts for database management and testing.
|
||||
This directory contains utility scripts for database operations, testing, and maintenance.
|
||||
|
||||
## Database Migration Scripts
|
||||
## Scripts
|
||||
|
||||
### migrate_dev_db.py
|
||||
Migrates the DEV database schema to add SLM-specific columns to the `roster` table.
|
||||
### create_test_db.py
|
||||
Generates a realistic test database with sample data.
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
cd /home/serversdown/sfm/seismo-fleet-manager
|
||||
python3 scripts/migrate_dev_db.py
|
||||
```
|
||||
Usage: python scripts/create_test_db.py
|
||||
|
||||
**What it does:**
|
||||
- Adds 8 SLM-specific columns to the DEV database (data-dev/seismo_fleet.db)
|
||||
- Columns: slm_host, slm_tcp_port, slm_model, slm_serial_number, slm_frequency_weighting, slm_time_weighting, slm_measurement_range, slm_last_check
|
||||
- Safe to run multiple times (skips existing columns)
|
||||
### rename_unit.py
|
||||
Renames a unit ID across all tables.
|
||||
|
||||
### update_dev_db_schema.py
|
||||
Inspects and displays the DEV database schema.
|
||||
Usage: python scripts/rename_unit.py <old_id> <new_id>
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
python3 scripts/update_dev_db_schema.py
|
||||
```
|
||||
### sync_slms_to_slmm.py
|
||||
Manually syncs all SLM devices from Terra-View to SLMM.
|
||||
|
||||
**What it does:**
|
||||
- Shows all tables in the DEV database
|
||||
- Lists all columns in the roster table
|
||||
- Useful for verifying schema after migrations
|
||||
|
||||
## Test Data Scripts
|
||||
|
||||
### add_test_slms.py
|
||||
Adds test Sound Level Meter units to the DEV database.
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
python3 scripts/add_test_slms.py
|
||||
```
|
||||
|
||||
**What it creates:**
|
||||
- nl43-001: NL-43 SLM at Construction Site A
|
||||
- nl43-002: NL-43 SLM at Construction Site B
|
||||
- nl53-001: NL-53 SLM at Residential Area
|
||||
- nl43-003: NL-43 SLM (not deployed, spare unit)
|
||||
|
||||
### add_test_modems.py
|
||||
Adds test modem units to the DEV database and assigns them to SLMs.
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
python3 scripts/add_test_modems.py
|
||||
```
|
||||
|
||||
**What it creates:**
|
||||
- modem-001, modem-002, modem-003: Deployed modems (Raven XTV and Sierra Wireless)
|
||||
- modem-004: Spare modem (not deployed)
|
||||
|
||||
**Modem assignments:**
|
||||
- nl43-001 → modem-001
|
||||
- nl43-002 → modem-002
|
||||
- nl53-001 → modem-003
|
||||
|
||||
## Cleanup Scripts
|
||||
|
||||
### remove_test_data_from_prod.py
|
||||
**⚠️ PRODUCTION DATABASE CLEANUP**
|
||||
|
||||
Removes test data from the production database (data/seismo_fleet.db).
|
||||
|
||||
**Status:** Already executed successfully. Production database is clean.
|
||||
|
||||
**What it removed:**
|
||||
- All test SLM units (nl43-001, nl43-002, nl53-001, nl43-003)
|
||||
- All test modem units (modem-001, modem-002, modem-003, modem-004)
|
||||
|
||||
## Database Cloning
|
||||
|
||||
### clone_db_to_dev.py
|
||||
Clones the production database to create/update the DEV database.
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
python3 scripts/clone_db_to_dev.py
|
||||
```
|
||||
|
||||
**What it does:**
|
||||
- Copies data/seismo_fleet.db → data-dev/seismo_fleet.db
|
||||
- Useful for syncing DEV database with production schema/data
|
||||
|
||||
## Setup Sequence
|
||||
|
||||
To set up a fresh DEV database with test data:
|
||||
|
||||
```bash
|
||||
cd /home/serversdown/sfm/seismo-fleet-manager
|
||||
|
||||
# 1. Fix permissions (if needed)
|
||||
sudo chown -R serversdown:serversdown data-dev/
|
||||
|
||||
# 2. Migrate schema
|
||||
python3 scripts/migrate_dev_db.py
|
||||
|
||||
# 3. Add test data
|
||||
python3 scripts/add_test_slms.py
|
||||
python3 scripts/add_test_modems.py
|
||||
|
||||
# 4. Verify
|
||||
sqlite3 data-dev/seismo_fleet.db "SELECT id, device_type FROM roster WHERE device_type IN ('sound_level_meter', 'modem');"
|
||||
```
|
||||
|
||||
## Important Notes
|
||||
|
||||
- **DEV Database**: `data-dev/seismo_fleet.db` - Used for development and testing
|
||||
- **Production Database**: `data/seismo_fleet.db` - Used by the running application
|
||||
- All test scripts are configured to use the DEV database only
|
||||
- Never run test data scripts against production
|
||||
Usage: python scripts/sync_slms_to_slmm.py
|
||||
|
||||
115
scripts/create_test_db.py
Normal file
115
scripts/create_test_db.py
Normal file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
"""
Create a fresh test database with the new schema and some sample data.

Drops and recreates every table in /tmp/sfm_test.db, then seeds it with:
two seismographs (one deployed, one benched with an overdue calibration),
two modems (one paired, one spare), and two emitter status reports
(one healthy, one missing).
"""

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime, date, timedelta
from backend.models import Base, RosterUnit, Emitter

# Throwaway test database location; safe to delete between runs.
TEST_DB_PATH = "/tmp/sfm_test.db"
engine = create_engine(f"sqlite:///{TEST_DB_PATH}", connect_args={"check_same_thread": False})

# Drop all tables and recreate them so the schema always matches the models.
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)

# Create a session
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
db = SessionLocal()

try:
    # Deployed seismograph, paired with modem MDM001 below.
    seismo1 = RosterUnit(
        id="BE9449",
        device_type="seismograph",
        unit_type="series3",
        deployed=True,
        note="Primary field unit",
        project_id="PROJ-001",
        location="Site A",
        last_calibrated=date(2024, 1, 15),
        next_calibration_due=date(2025, 1, 15),
        deployed_with_modem_id="MDM001",
        last_updated=datetime.utcnow(),
    )

    # Benched seismograph with a past-due calibration date, to exercise
    # any overdue-calibration reporting.
    seismo2 = RosterUnit(
        id="BE9450",
        device_type="seismograph",
        unit_type="series3",
        deployed=False,
        note="Benched for maintenance",
        project_id="PROJ-001",
        location="Warehouse",
        last_calibrated=date(2023, 6, 20),
        next_calibration_due=date(2024, 6, 20),  # Past due
        last_updated=datetime.utcnow(),
    )

    # Deployed modem, referenced by seismo1's deployed_with_modem_id.
    modem1 = RosterUnit(
        id="MDM001",
        device_type="modem",
        unit_type="modem",
        deployed=True,
        note="Paired with BE9449",
        project_id="PROJ-001",
        location="Site A",
        ip_address="192.168.1.100",
        phone_number="+1-555-0123",
        hardware_model="Raven XTV",
        last_updated=datetime.utcnow(),
    )

    # Spare modem, not assigned to any unit.
    modem2 = RosterUnit(
        id="MDM002",
        device_type="modem",
        unit_type="modem",
        deployed=False,
        note="Spare modem",
        project_id="PROJ-001",
        location="Warehouse",
        ip_address="192.168.1.101",
        phone_number="+1-555-0124",
        hardware_model="Raven XT",
        last_updated=datetime.utcnow(),
    )

    # Healthy status report: seen recently.
    emitter1 = Emitter(
        id="BE9449",
        unit_type="series3",
        last_seen=datetime.utcnow() - timedelta(hours=2),
        last_file="BE9449.2024.336.12.00.mseed",
        status="OK",
        notes="Running normally",
    )

    # Stale status report: 30 days silent, flagged Missing.
    emitter2 = Emitter(
        id="BE9450",
        unit_type="series3",
        last_seen=datetime.utcnow() - timedelta(days=30),
        last_file="BE9450.2024.306.08.00.mseed",
        status="Missing",
        notes="No data received",
    )

    # Persist everything in a single transaction.
    db.add_all([seismo1, seismo2, modem1, modem2, emitter1, emitter2])
    db.commit()

    print(f"✓ Test database created at {TEST_DB_PATH}")
    print("✓ Added 2 seismographs (BE9449, BE9450)")
    print("✓ Added 2 modems (MDM001, MDM002)")
    print("✓ Added 2 emitter status reports")
    print("\nDatabase is ready for testing!")

except Exception as e:
    print(f"Error creating test database: {e}")
    db.rollback()
    raise
finally:
    db.close()
|
||||
138
scripts/rename_unit.py
Normal file
138
scripts/rename_unit.py
Normal file
@@ -0,0 +1,138 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to rename a unit ID in the database.
|
||||
This updates the unit across all tables with proper foreign key handling.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
DATABASE_URL = "sqlite:///data/sfm.db"
|
||||
|
||||
def _rename_in_optional_table(session, table: str, old_id: str, new_id: str) -> None:
    """Best-effort rename of unit_id rows in *table*.

    Some deployments lack these auxiliary tables, so any error (typically
    "no such table") is deliberately ignored rather than aborting the rename.
    *table* comes from a fixed internal list, never user input, so the
    f-string SQL is safe.
    """
    try:
        result = session.execute(
            text(f"UPDATE {table} SET unit_id = :new_id WHERE unit_id = :old_id"),
            {"new_id": new_id, "old_id": old_id}
        )
        if result.rowcount > 0:
            print(f" ✓ Updated {table} ({result.rowcount} rows)")
    except Exception:
        pass  # Table may not exist


def rename_unit(old_id: str, new_id: str):
    """
    Rename a unit ID across all relevant tables.

    Validates that *old_id* exists and *new_id* does not, then updates the
    roster row, emitter status rows, history rows, modem references, and
    (if present) the unit_assignments and recording_sessions tables, all
    within a single transaction.

    Args:
        old_id: Current unit ID (e.g., "SLM4301")
        new_id: New unit ID (e.g., "SLM-43-01")

    Returns:
        True on success, False if validation fails or the update errors
        (in which case the transaction is rolled back).
    """
    engine = create_engine(DATABASE_URL)
    Session = sessionmaker(bind=engine)
    session = Session()

    try:
        # Check if old unit exists
        result = session.execute(
            text("SELECT id, device_type FROM roster WHERE id = :old_id"),
            {"old_id": old_id}
        ).fetchone()

        if not result:
            print(f"❌ Error: Unit '{old_id}' not found in roster")
            return False

        device_type = result[1]
        print(f"✓ Found unit '{old_id}' (device_type: {device_type})")

        # Refuse to clobber an existing unit with the target ID.
        result = session.execute(
            text("SELECT id FROM roster WHERE id = :new_id"),
            {"new_id": new_id}
        ).fetchone()

        if result:
            print(f"❌ Error: Unit ID '{new_id}' already exists")
            return False

        print(f"\n🔄 Renaming '{old_id}' → '{new_id}'...\n")

        params = {"new_id": new_id, "old_id": old_id}

        # Update roster table (primary record).
        session.execute(
            text("UPDATE roster SET id = :new_id WHERE id = :old_id"),
            params
        )
        print(" ✓ Updated roster")

        # Update emitters table (status reports keyed by the same ID).
        result = session.execute(
            text("UPDATE emitters SET id = :new_id WHERE id = :old_id"),
            params
        )
        if result.rowcount > 0:
            print(f" ✓ Updated emitters ({result.rowcount} rows)")

        # Update unit_history table.
        result = session.execute(
            text("UPDATE unit_history SET unit_id = :new_id WHERE unit_id = :old_id"),
            params
        )
        if result.rowcount > 0:
            print(f" ✓ Updated unit_history ({result.rowcount} rows)")

        # Update deployed_with_modem_id references (seismographs paired
        # with this unit as their modem).
        result = session.execute(
            text("UPDATE roster SET deployed_with_modem_id = :new_id WHERE deployed_with_modem_id = :old_id"),
            params
        )
        if result.rowcount > 0:
            print(f" ✓ Updated modem references ({result.rowcount} rows)")

        # Auxiliary tables that may not exist in every database.
        for table in ("unit_assignments", "recording_sessions"):
            _rename_in_optional_table(session, table, old_id, new_id)

        # Commit all changes atomically.
        session.commit()
        print(f"\n✅ Successfully renamed unit '{old_id}' to '{new_id}'")
        return True

    except Exception as e:
        session.rollback()
        print(f"\n❌ Error during rename: {e}")
        return False
    finally:
        session.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: rename_unit.py <old_id> <new_id>
    if len(sys.argv) != 3:
        print("Usage: python rename_unit.py <old_id> <new_id>")
        print("Example: python rename_unit.py SLM4301 SLM-43-01")
        sys.exit(1)

    old_id = sys.argv[1]
    new_id = sys.argv[2]

    banner = "=" * 50
    print("Unit Renaming Tool")
    print(banner)
    print(f"Old ID: {old_id}")
    print(f"New ID: {new_id}")
    print(banner)

    # Require explicit confirmation before touching the database.
    confirm = input(f"\nAre you sure you want to rename '{old_id}' to '{new_id}'? (yes/no): ")
    if confirm.lower() != 'yes':
        print("❌ Rename cancelled")
        sys.exit(0)

    # Exit code mirrors the rename outcome for shell scripting.
    success = rename_unit(old_id, new_id)
    sys.exit(0 if success else 1)
|
||||
67
scripts/sync_slms_to_slmm.py
Executable file
67
scripts/sync_slms_to_slmm.py
Executable file
@@ -0,0 +1,67 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
One-time script to sync existing SLM devices from Terra-View roster to SLMM cache.
|
||||
Run this after implementing the automatic sync to backfill existing devices.
|
||||
"""
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add the repository root (the parent of scripts/) to sys.path so that
# `backend.*` imports resolve when this script is run directly.
# BUG FIX: the original added the scripts/ directory itself (one dirname
# too few), which does not contain the backend package.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from backend.database import SessionLocal
|
||||
from backend.models import RosterUnit
|
||||
from backend.routers.roster_edit import sync_slm_to_slmm_cache
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def sync_all_slms():
    """Push every SLM device in the Terra-View roster into the SLMM cache.

    Queries all roster rows with device_type == "slm", calls
    sync_slm_to_slmm_cache() for each, logs per-device details and the
    sync result, and finishes with a success/failure tally.
    """
    db = SessionLocal()
    try:
        # Terra-View roster is the source of truth for SLM devices.
        devices = db.query(RosterUnit).filter_by(
            device_type="slm"
        ).all()

        logger.info(f"Found {len(devices)} SLM devices in Terra-View roster")

        succeeded, failed = 0, 0

        for slm in devices:
            logger.info(f"\nProcessing: {slm.id}")
            logger.info(f" Host: {slm.slm_host}")
            logger.info(f" TCP Port: {slm.slm_tcp_port}")
            logger.info(f" Modem: {slm.deployed_with_modem_id}")

            outcome = await sync_slm_to_slmm_cache(
                unit_id=slm.id,
                host=slm.slm_host,
                tcp_port=slm.slm_tcp_port,
                ftp_port=slm.slm_ftp_port,
                deployed_with_modem_id=slm.deployed_with_modem_id,
                db=db
            )

            if outcome["success"]:
                logger.info(f"✓ {slm.id}: {outcome['message']}")
                succeeded += 1
            else:
                logger.error(f"✗ {slm.id}: {outcome['message']}")
                failed += 1

        separator = "=" * 60
        logger.info(f"\n{separator}")
        logger.info(f"Cache sync complete: {succeeded} succeeded, {failed} failed")
        logger.info(separator)

    finally:
        # Always release the session, even if a sync call raises.
        db.close()


if __name__ == "__main__":
    asyncio.run(sync_all_slms())
|
||||
Reference in New Issue
Block a user