Update main to 0.5.1. See changelog. #18
BIN  assets/terra-view-icon_large.png  Normal file (36 KiB)
Binary file not shown.
@@ -105,8 +105,17 @@ app.include_router(scheduler.router)
 from backend.routers import report_templates
 app.include_router(report_templates.router)
 
-# Start scheduler service on application startup
+# Alerts router
+from backend.routers import alerts
+app.include_router(alerts.router)
+
+# Recurring schedules router
+from backend.routers import recurring_schedules
+app.include_router(recurring_schedules.router)
+
+# Start scheduler service and device status monitor on application startup
 from backend.services.scheduler import start_scheduler, stop_scheduler
+from backend.services.device_status_monitor import start_device_status_monitor, stop_device_status_monitor
 
 @app.on_event("startup")
 async def startup_event():
@@ -115,9 +124,17 @@ async def startup_event():
     await start_scheduler()
     logger.info("Scheduler service started")
 
+    logger.info("Starting device status monitor...")
+    await start_device_status_monitor()
+    logger.info("Device status monitor started")
+
 @app.on_event("shutdown")
 def shutdown_event():
     """Clean up services on app shutdown"""
+    logger.info("Stopping device status monitor...")
+    stop_device_status_monitor()
+    logger.info("Device status monitor stopped")
+
     logger.info("Stopping scheduler service...")
     stop_scheduler()
     logger.info("Scheduler service stopped")
67  backend/migrate_add_auto_increment_index.py  Normal file
@@ -0,0 +1,67 @@
"""
Migration: Add auto_increment_index column to recurring_schedules table

This migration adds the auto_increment_index column that controls whether
the scheduler should automatically find an unused store index before starting
a new measurement.

Run this script once to update existing databases:
    python -m backend.migrate_add_auto_increment_index
"""

import sqlite3
import os

DB_PATH = "data/seismo_fleet.db"


def migrate():
    """Add auto_increment_index column to recurring_schedules table."""
    if not os.path.exists(DB_PATH):
        print(f"Database not found at {DB_PATH}")
        return False

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # Check if recurring_schedules table exists
        cursor.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name='recurring_schedules'
        """)
        if not cursor.fetchone():
            print("recurring_schedules table does not exist yet. Will be created on app startup.")
            conn.close()
            return True

        # Check if auto_increment_index column already exists
        cursor.execute("PRAGMA table_info(recurring_schedules)")
        columns = [row[1] for row in cursor.fetchall()]

        if "auto_increment_index" in columns:
            print("auto_increment_index column already exists in recurring_schedules table.")
            conn.close()
            return True

        # Add the column
        print("Adding auto_increment_index column to recurring_schedules table...")
        cursor.execute("""
            ALTER TABLE recurring_schedules
            ADD COLUMN auto_increment_index BOOLEAN DEFAULT 1
        """)
        conn.commit()
        print("Successfully added auto_increment_index column.")

        conn.close()
        return True

    except Exception as e:
        print(f"Migration failed: {e}")
        conn.close()
        return False


if __name__ == "__main__":
    success = migrate()
    exit(0 if success else 1)
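A quick way to sanity-check the migration is to inspect the table schema afterwards. The snippet below is an illustrative sketch only; it assumes the same default data/seismo_fleet.db path the migration script uses.

import sqlite3

# Connect to the database the migration script targets (assumed default path)
conn = sqlite3.connect("data/seismo_fleet.db")
cursor = conn.cursor()

# PRAGMA table_info returns one row per column; index 1 is the column name
cursor.execute("PRAGMA table_info(recurring_schedules)")
columns = [row[1] for row in cursor.fetchall()]
conn.close()

print("auto_increment_index present:", "auto_increment_index" in columns)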
@@ -300,3 +300,93 @@ class ReportTemplate(Base):
 
     created_at = Column(DateTime, default=datetime.utcnow)
     updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+
+# ============================================================================
+# Sound Monitoring Scheduler
+# ============================================================================
+
+class RecurringSchedule(Base):
+    """
+    Recurring schedule definitions for automated sound monitoring.
+
+    Supports two schedule types:
+    - "weekly_calendar": Select specific days with start/end times (e.g., Mon/Wed/Fri 7pm-7am)
+    - "simple_interval": For 24/7 monitoring with daily stop/download/restart cycles
+    """
+    __tablename__ = "recurring_schedules"
+
+    id = Column(String, primary_key=True, index=True)  # UUID
+    project_id = Column(String, nullable=False, index=True)  # FK to Project.id
+    location_id = Column(String, nullable=False, index=True)  # FK to MonitoringLocation.id
+    unit_id = Column(String, nullable=True, index=True)  # FK to RosterUnit.id (optional, can use assignment)
+
+    name = Column(String, nullable=False)  # "Weeknight Monitoring", "24/7 Continuous"
+    schedule_type = Column(String, nullable=False)  # "weekly_calendar" | "simple_interval"
+    device_type = Column(String, nullable=False)  # "slm" | "seismograph"
+
+    # Weekly Calendar fields (schedule_type = "weekly_calendar")
+    # JSON format: {
+    #   "monday": {"enabled": true, "start": "19:00", "end": "07:00"},
+    #   "tuesday": {"enabled": false},
+    #   ...
+    # }
+    weekly_pattern = Column(Text, nullable=True)
+
+    # Simple Interval fields (schedule_type = "simple_interval")
+    interval_type = Column(String, nullable=True)  # "daily" | "hourly"
+    cycle_time = Column(String, nullable=True)  # "00:00" - time to run stop/download/restart
+    include_download = Column(Boolean, default=True)  # Download data before restart
+
+    # Automation options (applies to both schedule types)
+    auto_increment_index = Column(Boolean, default=True)  # Auto-increment store/index number before start
+    # When True: prevents "overwrite data?" prompts by using a new index each time
+
+    # Shared configuration
+    enabled = Column(Boolean, default=True)
+    timezone = Column(String, default="America/New_York")
+
+    # Tracking
+    last_generated_at = Column(DateTime, nullable=True)  # When actions were last generated
+    next_occurrence = Column(DateTime, nullable=True)  # Computed next action time
+
+    created_at = Column(DateTime, default=datetime.utcnow)
+    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+
+class Alert(Base):
+    """
+    In-app alerts for device status changes and system events.
+
+    Designed for future expansion to email/webhook notifications.
+    Currently supports:
+    - device_offline: Device became unreachable
+    - device_online: Device came back online
+    - schedule_failed: Scheduled action failed to execute
+    """
+    __tablename__ = "alerts"
+
+    id = Column(String, primary_key=True, index=True)  # UUID
+
+    # Alert classification
+    alert_type = Column(String, nullable=False)  # "device_offline" | "device_online" | "schedule_failed"
+    severity = Column(String, default="warning")  # "info" | "warning" | "critical"
+
+    # Related entities (nullable - may not all apply)
+    project_id = Column(String, nullable=True, index=True)
+    location_id = Column(String, nullable=True, index=True)
+    unit_id = Column(String, nullable=True, index=True)
+    schedule_id = Column(String, nullable=True)  # RecurringSchedule or ScheduledAction id
+
+    # Alert content
+    title = Column(String, nullable=False)  # "NRL-001 Device Offline"
+    message = Column(Text, nullable=True)  # Detailed description
+    alert_metadata = Column(Text, nullable=True)  # JSON: additional context data
+
+    # Status tracking
+    status = Column(String, default="active")  # "active" | "acknowledged" | "resolved" | "dismissed"
+    acknowledged_at = Column(DateTime, nullable=True)
+    resolved_at = Column(DateTime, nullable=True)
+
+    created_at = Column(DateTime, default=datetime.utcnow)
+    expires_at = Column(DateTime, nullable=True)  # Auto-dismiss after this time
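For reference, weekly_pattern stores the JSON structure described in the column comment as serialized text. A minimal sketch of creating a schedule row directly, assuming the SessionLocal session factory from backend.database used elsewhere in this PR (the project and location IDs are placeholders):

import json
import uuid

from backend.database import SessionLocal
from backend.models import RecurringSchedule

# Mon/Wed/Fri overnight monitoring, 7pm-7am, matching the documented JSON format
pattern = {
    "monday": {"enabled": True, "start": "19:00", "end": "07:00"},
    "tuesday": {"enabled": False},
    "wednesday": {"enabled": True, "start": "19:00", "end": "07:00"},
    "thursday": {"enabled": False},
    "friday": {"enabled": True, "start": "19:00", "end": "07:00"},
    "saturday": {"enabled": False},
    "sunday": {"enabled": False},
}

db = SessionLocal()
schedule = RecurringSchedule(
    id=str(uuid.uuid4()),
    project_id="example-project-id",    # hypothetical IDs, for illustration only
    location_id="example-location-id",
    name="Weeknight Monitoring",
    schedule_type="weekly_calendar",
    device_type="slm",
    weekly_pattern=json.dumps(pattern),  # stored as JSON text
)
db.add(schedule)
db.commit()
db.close()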
327  backend/routers/alerts.py  Normal file
@@ -0,0 +1,327 @@
"""
Alerts Router

API endpoints for managing in-app alerts.
"""

from fastapi import APIRouter, Request, Depends, HTTPException, Query
from fastapi.templating import Jinja2Templates
from fastapi.responses import HTMLResponse, JSONResponse
from sqlalchemy.orm import Session
from typing import Optional
from datetime import datetime, timedelta

from backend.database import get_db
from backend.models import Alert, RosterUnit
from backend.services.alert_service import get_alert_service

router = APIRouter(prefix="/api/alerts", tags=["alerts"])
templates = Jinja2Templates(directory="templates")


# ============================================================================
# Alert List and Count
# ============================================================================

@router.get("/")
async def list_alerts(
    db: Session = Depends(get_db),
    status: Optional[str] = Query(None, description="Filter by status: active, acknowledged, resolved, dismissed"),
    project_id: Optional[str] = Query(None),
    unit_id: Optional[str] = Query(None),
    alert_type: Optional[str] = Query(None, description="Filter by type: device_offline, device_online, schedule_failed"),
    limit: int = Query(50, le=100),
    offset: int = Query(0, ge=0),
):
    """
    List alerts with optional filters.
    """
    alert_service = get_alert_service(db)

    alerts = alert_service.get_all_alerts(
        status=status,
        project_id=project_id,
        unit_id=unit_id,
        alert_type=alert_type,
        limit=limit,
        offset=offset,
    )

    return {
        "alerts": [
            {
                "id": a.id,
                "alert_type": a.alert_type,
                "severity": a.severity,
                "title": a.title,
                "message": a.message,
                "status": a.status,
                "unit_id": a.unit_id,
                "project_id": a.project_id,
                "location_id": a.location_id,
                "created_at": a.created_at.isoformat() if a.created_at else None,
                "acknowledged_at": a.acknowledged_at.isoformat() if a.acknowledged_at else None,
                "resolved_at": a.resolved_at.isoformat() if a.resolved_at else None,
            }
            for a in alerts
        ],
        "count": len(alerts),
        "limit": limit,
        "offset": offset,
    }


@router.get("/active")
async def list_active_alerts(
    db: Session = Depends(get_db),
    project_id: Optional[str] = Query(None),
    unit_id: Optional[str] = Query(None),
    alert_type: Optional[str] = Query(None),
    min_severity: Optional[str] = Query(None, description="Minimum severity: info, warning, critical"),
    limit: int = Query(50, le=100),
):
    """
    List only active alerts.
    """
    alert_service = get_alert_service(db)

    alerts = alert_service.get_active_alerts(
        project_id=project_id,
        unit_id=unit_id,
        alert_type=alert_type,
        min_severity=min_severity,
        limit=limit,
    )

    return {
        "alerts": [
            {
                "id": a.id,
                "alert_type": a.alert_type,
                "severity": a.severity,
                "title": a.title,
                "message": a.message,
                "unit_id": a.unit_id,
                "project_id": a.project_id,
                "created_at": a.created_at.isoformat() if a.created_at else None,
            }
            for a in alerts
        ],
        "count": len(alerts),
    }


@router.get("/active/count")
async def get_active_alert_count(db: Session = Depends(get_db)):
    """
    Get count of active alerts (for navbar badge).
    """
    alert_service = get_alert_service(db)
    count = alert_service.get_active_alert_count()
    return {"count": count}


# ============================================================================
# Single Alert Operations
# ============================================================================

@router.get("/{alert_id}")
async def get_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """
    Get a specific alert.
    """
    alert = db.query(Alert).filter_by(id=alert_id).first()
    if not alert:
        raise HTTPException(status_code=404, detail="Alert not found")

    # Get related unit info
    unit = None
    if alert.unit_id:
        unit = db.query(RosterUnit).filter_by(id=alert.unit_id).first()

    return {
        "id": alert.id,
        "alert_type": alert.alert_type,
        "severity": alert.severity,
        "title": alert.title,
        "message": alert.message,
        "metadata": alert.alert_metadata,
        "status": alert.status,
        "unit_id": alert.unit_id,
        "unit_name": unit.id if unit else None,
        "project_id": alert.project_id,
        "location_id": alert.location_id,
        "schedule_id": alert.schedule_id,
        "created_at": alert.created_at.isoformat() if alert.created_at else None,
        "acknowledged_at": alert.acknowledged_at.isoformat() if alert.acknowledged_at else None,
        "resolved_at": alert.resolved_at.isoformat() if alert.resolved_at else None,
        "expires_at": alert.expires_at.isoformat() if alert.expires_at else None,
    }


@router.post("/{alert_id}/acknowledge")
async def acknowledge_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """
    Mark alert as acknowledged.
    """
    alert_service = get_alert_service(db)
    alert = alert_service.acknowledge_alert(alert_id)

    if not alert:
        raise HTTPException(status_code=404, detail="Alert not found")

    return {
        "success": True,
        "alert_id": alert.id,
        "status": alert.status,
    }


@router.post("/{alert_id}/dismiss")
async def dismiss_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """
    Dismiss alert.
    """
    alert_service = get_alert_service(db)
    alert = alert_service.dismiss_alert(alert_id)

    if not alert:
        raise HTTPException(status_code=404, detail="Alert not found")

    return {
        "success": True,
        "alert_id": alert.id,
        "status": alert.status,
    }


@router.post("/{alert_id}/resolve")
async def resolve_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """
    Manually resolve an alert.
    """
    alert_service = get_alert_service(db)
    alert = alert_service.resolve_alert(alert_id)

    if not alert:
        raise HTTPException(status_code=404, detail="Alert not found")

    return {
        "success": True,
        "alert_id": alert.id,
        "status": alert.status,
    }


# ============================================================================
# HTML Partials for HTMX
# ============================================================================

@router.get("/partials/dropdown", response_class=HTMLResponse)
async def get_alert_dropdown(
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Return HTML partial for alert dropdown in navbar.
    """
    alert_service = get_alert_service(db)
    alerts = alert_service.get_active_alerts(limit=10)

    # Calculate relative time for each alert
    now = datetime.utcnow()
    alerts_data = []
    for alert in alerts:
        delta = now - alert.created_at
        if delta.days > 0:
            time_ago = f"{delta.days}d ago"
        elif delta.seconds >= 3600:
            time_ago = f"{delta.seconds // 3600}h ago"
        elif delta.seconds >= 60:
            time_ago = f"{delta.seconds // 60}m ago"
        else:
            time_ago = "just now"

        alerts_data.append({
            "alert": alert,
            "time_ago": time_ago,
        })

    return templates.TemplateResponse("partials/alerts/alert_dropdown.html", {
        "request": request,
        "alerts": alerts_data,
        "total_count": alert_service.get_active_alert_count(),
    })


@router.get("/partials/list", response_class=HTMLResponse)
async def get_alert_list(
    request: Request,
    db: Session = Depends(get_db),
    status: Optional[str] = Query(None),
    limit: int = Query(20),
):
    """
    Return HTML partial for alert list page.
    """
    alert_service = get_alert_service(db)

    if status:
        alerts = alert_service.get_all_alerts(status=status, limit=limit)
    else:
        alerts = alert_service.get_all_alerts(limit=limit)

    # Calculate relative time for each alert
    now = datetime.utcnow()
    alerts_data = []
    for alert in alerts:
        delta = now - alert.created_at
        if delta.days > 0:
            time_ago = f"{delta.days}d ago"
        elif delta.seconds >= 3600:
            time_ago = f"{delta.seconds // 3600}h ago"
        elif delta.seconds >= 60:
            time_ago = f"{delta.seconds // 60}m ago"
        else:
            time_ago = "just now"

        alerts_data.append({
            "alert": alert,
            "time_ago": time_ago,
        })

    return templates.TemplateResponse("partials/alerts/alert_list.html", {
        "request": request,
        "alerts": alerts_data,
        "status_filter": status,
    })


# ============================================================================
# Cleanup
# ============================================================================

@router.post("/cleanup-expired")
async def cleanup_expired_alerts(db: Session = Depends(get_db)):
    """
    Cleanup expired alerts (admin/maintenance endpoint).
    """
    alert_service = get_alert_service(db)
    count = alert_service.cleanup_expired_alerts()

    return {
        "success": True,
        "cleaned_up": count,
    }
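A hedged sketch of exercising these endpoints from a script, assuming the app is served locally on port 8000 (the host, port, and alert IDs are illustrative, not part of this PR):

import httpx

BASE = "http://localhost:8000/api/alerts"  # assumed local dev address

with httpx.Client() as client:
    # Badge count for the navbar
    count = client.get(f"{BASE}/active/count").json()["count"]
    print(f"{count} active alert(s)")

    # List active alerts at warning severity or above
    alerts = client.get(f"{BASE}/active", params={"min_severity": "warning"}).json()["alerts"]

    # Acknowledge each one
    for alert in alerts:
        client.post(f"{BASE}/{alert['id']}/acknowledge")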
@@ -90,6 +90,40 @@ async def get_project_locations(
     })
 
 
+@router.get("/locations-json")
+async def get_project_locations_json(
+    project_id: str,
+    db: Session = Depends(get_db),
+    location_type: Optional[str] = Query(None),
+):
+    """
+    Get all monitoring locations for a project as JSON.
+    Used by the schedule modal to populate location dropdown.
+    """
+    project = db.query(Project).filter_by(id=project_id).first()
+    if not project:
+        raise HTTPException(status_code=404, detail="Project not found")
+
+    query = db.query(MonitoringLocation).filter_by(project_id=project_id)
+
+    if location_type:
+        query = query.filter_by(location_type=location_type)
+
+    locations = query.order_by(MonitoringLocation.name).all()
+
+    return [
+        {
+            "id": loc.id,
+            "name": loc.name,
+            "location_type": loc.location_type,
+            "description": loc.description,
+            "address": loc.address,
+            "coordinates": loc.coordinates,
+        }
+        for loc in locations
+    ]
+
+
 @router.post("/locations/create")
 async def create_location(
     project_id: str,
@@ -28,6 +28,7 @@ from backend.models import (
     UnitAssignment,
     RecordingSession,
     ScheduledAction,
+    RecurringSchedule,
     RosterUnit,
 )
 
458  backend/routers/recurring_schedules.py  Normal file
@@ -0,0 +1,458 @@
"""
Recurring Schedules Router

API endpoints for managing recurring monitoring schedules.
"""

from fastapi import APIRouter, Request, Depends, HTTPException, Query
from fastapi.templating import Jinja2Templates
from fastapi.responses import HTMLResponse, JSONResponse
from sqlalchemy.orm import Session
from typing import Optional
from datetime import datetime
import json

from backend.database import get_db
from backend.models import RecurringSchedule, MonitoringLocation, Project, RosterUnit
from backend.services.recurring_schedule_service import get_recurring_schedule_service

router = APIRouter(prefix="/api/projects/{project_id}/recurring-schedules", tags=["recurring-schedules"])
templates = Jinja2Templates(directory="templates")


# ============================================================================
# List and Get
# ============================================================================

@router.get("/")
async def list_recurring_schedules(
    project_id: str,
    db: Session = Depends(get_db),
    enabled_only: bool = Query(False),
):
    """
    List all recurring schedules for a project.
    """
    project = db.query(Project).filter_by(id=project_id).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    query = db.query(RecurringSchedule).filter_by(project_id=project_id)
    if enabled_only:
        query = query.filter_by(enabled=True)

    schedules = query.order_by(RecurringSchedule.created_at.desc()).all()

    return {
        "schedules": [
            {
                "id": s.id,
                "name": s.name,
                "schedule_type": s.schedule_type,
                "device_type": s.device_type,
                "location_id": s.location_id,
                "unit_id": s.unit_id,
                "enabled": s.enabled,
                "weekly_pattern": json.loads(s.weekly_pattern) if s.weekly_pattern else None,
                "interval_type": s.interval_type,
                "cycle_time": s.cycle_time,
                "include_download": s.include_download,
                "timezone": s.timezone,
                "next_occurrence": s.next_occurrence.isoformat() if s.next_occurrence else None,
                "last_generated_at": s.last_generated_at.isoformat() if s.last_generated_at else None,
                "created_at": s.created_at.isoformat() if s.created_at else None,
            }
            for s in schedules
        ],
        "count": len(schedules),
    }


@router.get("/{schedule_id}")
async def get_recurring_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """
    Get a specific recurring schedule.
    """
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()

    if not schedule:
        raise HTTPException(status_code=404, detail="Schedule not found")

    # Get related location and unit info
    location = db.query(MonitoringLocation).filter_by(id=schedule.location_id).first()
    unit = None
    if schedule.unit_id:
        unit = db.query(RosterUnit).filter_by(id=schedule.unit_id).first()

    return {
        "id": schedule.id,
        "name": schedule.name,
        "schedule_type": schedule.schedule_type,
        "device_type": schedule.device_type,
        "location_id": schedule.location_id,
        "location_name": location.name if location else None,
        "unit_id": schedule.unit_id,
        "unit_name": unit.id if unit else None,
        "enabled": schedule.enabled,
        "weekly_pattern": json.loads(schedule.weekly_pattern) if schedule.weekly_pattern else None,
        "interval_type": schedule.interval_type,
        "cycle_time": schedule.cycle_time,
        "include_download": schedule.include_download,
        "timezone": schedule.timezone,
        "next_occurrence": schedule.next_occurrence.isoformat() if schedule.next_occurrence else None,
        "last_generated_at": schedule.last_generated_at.isoformat() if schedule.last_generated_at else None,
        "created_at": schedule.created_at.isoformat() if schedule.created_at else None,
        "updated_at": schedule.updated_at.isoformat() if schedule.updated_at else None,
    }


# ============================================================================
# Create
# ============================================================================

@router.post("/")
async def create_recurring_schedule(
    project_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Create recurring schedules for one or more locations.

    Body for weekly_calendar (supports multiple locations):
    {
        "name": "Weeknight Monitoring",
        "schedule_type": "weekly_calendar",
        "location_ids": ["uuid1", "uuid2"],  // Array of location IDs
        "weekly_pattern": {
            "monday": {"enabled": true, "start": "19:00", "end": "07:00"},
            "tuesday": {"enabled": false},
            ...
        },
        "include_download": true,
        "auto_increment_index": true,
        "timezone": "America/New_York"
    }

    Body for simple_interval (supports multiple locations):
    {
        "name": "24/7 Continuous",
        "schedule_type": "simple_interval",
        "location_ids": ["uuid1", "uuid2"],  // Array of location IDs
        "interval_type": "daily",
        "cycle_time": "00:00",
        "include_download": true,
        "auto_increment_index": true,
        "timezone": "America/New_York"
    }

    Legacy single location support (backwards compatible):
    {
        "name": "...",
        "location_id": "uuid",  // Single location ID
        ...
    }
    """
    project = db.query(Project).filter_by(id=project_id).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    data = await request.json()

    # Support both location_ids (array) and location_id (single) for backwards compatibility
    location_ids = data.get("location_ids", [])
    if not location_ids and data.get("location_id"):
        location_ids = [data.get("location_id")]

    if not location_ids:
        raise HTTPException(status_code=400, detail="At least one location is required")

    # Validate all locations exist
    locations = db.query(MonitoringLocation).filter(
        MonitoringLocation.id.in_(location_ids),
        MonitoringLocation.project_id == project_id,
    ).all()

    if len(locations) != len(location_ids):
        raise HTTPException(status_code=404, detail="One or more locations not found")

    service = get_recurring_schedule_service(db)
    created_schedules = []
    base_name = data.get("name", "Unnamed Schedule")

    # Create a schedule for each location
    for location in locations:
        # Determine device type from location
        device_type = "slm" if location.location_type == "sound" else "seismograph"

        # Append location name if multiple locations
        schedule_name = f"{base_name} - {location.name}" if len(locations) > 1 else base_name

        schedule = service.create_schedule(
            project_id=project_id,
            location_id=location.id,
            name=schedule_name,
            schedule_type=data.get("schedule_type", "weekly_calendar"),
            device_type=device_type,
            unit_id=data.get("unit_id"),
            weekly_pattern=data.get("weekly_pattern"),
            interval_type=data.get("interval_type"),
            cycle_time=data.get("cycle_time"),
            include_download=data.get("include_download", True),
            auto_increment_index=data.get("auto_increment_index", True),
            timezone=data.get("timezone", "America/New_York"),
        )
        created_schedules.append({
            "schedule_id": schedule.id,
            "location_id": location.id,
            "location_name": location.name,
        })

    return JSONResponse({
        "success": True,
        "schedules": created_schedules,
        "count": len(created_schedules),
        "message": f"Created {len(created_schedules)} recurring schedule(s)",
    })


# ============================================================================
# Update
# ============================================================================

@router.put("/{schedule_id}")
async def update_recurring_schedule(
    project_id: str,
    schedule_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Update a recurring schedule.
    """
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()

    if not schedule:
        raise HTTPException(status_code=404, detail="Schedule not found")

    data = await request.json()
    service = get_recurring_schedule_service(db)

    # Build update kwargs
    update_kwargs = {}
    for field in ["name", "weekly_pattern", "interval_type", "cycle_time",
                  "include_download", "auto_increment_index", "timezone", "unit_id"]:
        if field in data:
            update_kwargs[field] = data[field]

    updated = service.update_schedule(schedule_id, **update_kwargs)

    return {
        "success": True,
        "schedule_id": updated.id,
        "message": "Schedule updated successfully",
    }


# ============================================================================
# Delete
# ============================================================================

@router.delete("/{schedule_id}")
async def delete_recurring_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """
    Delete a recurring schedule.
    """
    service = get_recurring_schedule_service(db)
    deleted = service.delete_schedule(schedule_id)

    if not deleted:
        raise HTTPException(status_code=404, detail="Schedule not found")

    return {
        "success": True,
        "message": "Schedule deleted successfully",
    }


# ============================================================================
# Enable/Disable
# ============================================================================

@router.post("/{schedule_id}/enable")
async def enable_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """
    Enable a disabled schedule.
    """
    service = get_recurring_schedule_service(db)
    schedule = service.enable_schedule(schedule_id)

    if not schedule:
        raise HTTPException(status_code=404, detail="Schedule not found")

    return {
        "success": True,
        "schedule_id": schedule.id,
        "enabled": schedule.enabled,
        "message": "Schedule enabled",
    }


@router.post("/{schedule_id}/disable")
async def disable_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """
    Disable a schedule.
    """
    service = get_recurring_schedule_service(db)
    schedule = service.disable_schedule(schedule_id)

    if not schedule:
        raise HTTPException(status_code=404, detail="Schedule not found")

    return {
        "success": True,
        "schedule_id": schedule.id,
        "enabled": schedule.enabled,
        "message": "Schedule disabled",
    }


# ============================================================================
# Preview Generated Actions
# ============================================================================

@router.post("/{schedule_id}/generate-preview")
async def preview_generated_actions(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
    days: int = Query(7, ge=1, le=30),
):
    """
    Preview what actions would be generated without saving them.
    """
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()

    if not schedule:
        raise HTTPException(status_code=404, detail="Schedule not found")

    service = get_recurring_schedule_service(db)
    actions = service.generate_actions_for_schedule(
        schedule,
        horizon_days=days,
        preview_only=True,
    )

    return {
        "schedule_id": schedule_id,
        "schedule_name": schedule.name,
        "preview_days": days,
        "actions": [
            {
                "action_type": a.action_type,
                "scheduled_time": a.scheduled_time.isoformat(),
                "notes": a.notes,
            }
            for a in actions
        ],
        "action_count": len(actions),
    }


# ============================================================================
# Manual Generation Trigger
# ============================================================================

@router.post("/{schedule_id}/generate")
async def generate_actions_now(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
    days: int = Query(7, ge=1, le=30),
):
    """
    Manually trigger action generation for a schedule.
    """
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()

    if not schedule:
        raise HTTPException(status_code=404, detail="Schedule not found")

    if not schedule.enabled:
        raise HTTPException(status_code=400, detail="Schedule is disabled")

    service = get_recurring_schedule_service(db)
    actions = service.generate_actions_for_schedule(
        schedule,
        horizon_days=days,
        preview_only=False,
    )

    return {
        "success": True,
        "schedule_id": schedule_id,
        "generated_count": len(actions),
        "message": f"Generated {len(actions)} scheduled actions",
    }


# ============================================================================
# HTML Partials
# ============================================================================

@router.get("/partials/list", response_class=HTMLResponse)
async def get_schedule_list_partial(
    project_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Return HTML partial for schedule list.
    """
    schedules = db.query(RecurringSchedule).filter_by(
        project_id=project_id
    ).order_by(RecurringSchedule.created_at.desc()).all()

    # Enrich with location info
    schedule_data = []
    for s in schedules:
        location = db.query(MonitoringLocation).filter_by(id=s.location_id).first()
        schedule_data.append({
            "schedule": s,
            "location": location,
            "pattern": json.loads(s.weekly_pattern) if s.weekly_pattern else None,
        })

    return templates.TemplateResponse("partials/projects/recurring_schedule_list.html", {
        "request": request,
        "project_id": project_id,
        "schedules": schedule_data,
    })
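As a usage sketch, creating a 24/7 schedule for two locations could look like the request below. The base URL assumes a local dev server, and the project and location IDs are placeholders, not values from this PR:

import httpx

project_id = "example-project-id"  # hypothetical
payload = {
    "name": "24/7 Continuous",
    "schedule_type": "simple_interval",
    "location_ids": ["location-uuid-1", "location-uuid-2"],
    "interval_type": "daily",
    "cycle_time": "00:00",
    "include_download": True,
    "auto_increment_index": True,
    "timezone": "America/New_York",
}

resp = httpx.post(
    f"http://localhost:8000/api/projects/{project_id}/recurring-schedules/",
    json=payload,
)
print(resp.json()["message"])  # e.g. "Created 2 recurring schedule(s)"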
407  backend/services/alert_service.py  Normal file
@@ -0,0 +1,407 @@
"""
Alert Service

Manages in-app alerts for device status changes and system events.
Provides foundation for future notification channels (email, webhook).
"""

import json
import uuid
import logging
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any

from sqlalchemy.orm import Session
from sqlalchemy import and_, or_

from backend.models import Alert, RosterUnit

logger = logging.getLogger(__name__)


class AlertService:
    """
    Service for managing alerts.

    Handles alert lifecycle:
    - Create alerts from various triggers
    - Query active alerts
    - Acknowledge/resolve/dismiss alerts
    - (Future) Dispatch to notification channels
    """

    def __init__(self, db: Session):
        self.db = db

    def create_alert(
        self,
        alert_type: str,
        title: str,
        message: str = None,
        severity: str = "warning",
        unit_id: str = None,
        project_id: str = None,
        location_id: str = None,
        schedule_id: str = None,
        metadata: dict = None,
        expires_hours: int = 24,
    ) -> Alert:
        """
        Create a new alert.

        Args:
            alert_type: Type of alert (device_offline, device_online, schedule_failed)
            title: Short alert title
            message: Detailed description
            severity: info, warning, or critical
            unit_id: Related unit ID (optional)
            project_id: Related project ID (optional)
            location_id: Related location ID (optional)
            schedule_id: Related schedule ID (optional)
            metadata: Additional JSON data
            expires_hours: Hours until auto-expiry (default 24)

        Returns:
            Created Alert instance
        """
        alert = Alert(
            id=str(uuid.uuid4()),
            alert_type=alert_type,
            title=title,
            message=message,
            severity=severity,
            unit_id=unit_id,
            project_id=project_id,
            location_id=location_id,
            schedule_id=schedule_id,
            alert_metadata=json.dumps(metadata) if metadata else None,
            status="active",
            expires_at=datetime.utcnow() + timedelta(hours=expires_hours),
        )

        self.db.add(alert)
        self.db.commit()
        self.db.refresh(alert)

        logger.info(f"Created alert: {alert.title} ({alert.alert_type})")
        return alert

    def create_device_offline_alert(
        self,
        unit_id: str,
        consecutive_failures: int = 0,
        last_error: str = None,
    ) -> Optional[Alert]:
        """
        Create alert when device becomes unreachable.

        Only creates if no active offline alert exists for this device.

        Args:
            unit_id: The unit that went offline
            consecutive_failures: Number of consecutive poll failures
            last_error: Last error message from polling

        Returns:
            Created Alert or None if alert already exists
        """
        # Check if active offline alert already exists
        existing = self.db.query(Alert).filter(
            and_(
                Alert.unit_id == unit_id,
                Alert.alert_type == "device_offline",
                Alert.status == "active",
            )
        ).first()

        if existing:
            logger.debug(f"Offline alert already exists for {unit_id}")
            return None

        # Get unit info for title
        unit = self.db.query(RosterUnit).filter_by(id=unit_id).first()
        unit_name = unit.id if unit else unit_id

        # Determine severity based on failure count
        severity = "critical" if consecutive_failures >= 5 else "warning"

        return self.create_alert(
            alert_type="device_offline",
            title=f"{unit_name} is offline",
            message=f"Device has been unreachable after {consecutive_failures} failed connection attempts."
                    + (f" Last error: {last_error}" if last_error else ""),
            severity=severity,
            unit_id=unit_id,
            metadata={
                "consecutive_failures": consecutive_failures,
                "last_error": last_error,
            },
            expires_hours=48,  # Offline alerts stay longer
        )

    def resolve_device_offline_alert(self, unit_id: str) -> Optional[Alert]:
        """
        Auto-resolve offline alert when device comes back online.

        Also creates a "device_online" info alert to notify the user.

        Args:
            unit_id: The unit that came back online

        Returns:
            The resolved Alert or None if no alert existed
        """
        # Find active offline alert
        alert = self.db.query(Alert).filter(
            and_(
                Alert.unit_id == unit_id,
                Alert.alert_type == "device_offline",
                Alert.status == "active",
            )
        ).first()

        if not alert:
            return None

        # Resolve the offline alert
        alert.status = "resolved"
        alert.resolved_at = datetime.utcnow()
        self.db.commit()

        logger.info(f"Resolved offline alert for {unit_id}")

        # Create online notification
        unit = self.db.query(RosterUnit).filter_by(id=unit_id).first()
        unit_name = unit.id if unit else unit_id

        self.create_alert(
            alert_type="device_online",
            title=f"{unit_name} is back online",
            message="Device connection has been restored.",
            severity="info",
            unit_id=unit_id,
            expires_hours=6,  # Info alerts expire quickly
        )

        return alert

    def create_schedule_failed_alert(
        self,
        schedule_id: str,
        action_type: str,
        unit_id: str = None,
        error_message: str = None,
        project_id: str = None,
        location_id: str = None,
    ) -> Alert:
        """
        Create alert when a scheduled action fails.

        Args:
            schedule_id: The ScheduledAction or RecurringSchedule ID
            action_type: start, stop, download
            unit_id: Related unit
            error_message: Error from execution
            project_id: Related project
            location_id: Related location

        Returns:
            Created Alert
        """
        return self.create_alert(
            alert_type="schedule_failed",
            title=f"Scheduled {action_type} failed",
            message=error_message or f"The scheduled {action_type} action did not complete successfully.",
            severity="warning",
            unit_id=unit_id,
            project_id=project_id,
            location_id=location_id,
            schedule_id=schedule_id,
            metadata={"action_type": action_type},
            expires_hours=24,
        )

    def get_active_alerts(
        self,
        project_id: str = None,
        unit_id: str = None,
        alert_type: str = None,
        min_severity: str = None,
        limit: int = 50,
    ) -> List[Alert]:
        """
        Query active alerts with optional filters.

        Args:
            project_id: Filter by project
            unit_id: Filter by unit
            alert_type: Filter by alert type
            min_severity: Minimum severity (info, warning, critical)
            limit: Maximum results

        Returns:
            List of matching alerts
        """
        query = self.db.query(Alert).filter(Alert.status == "active")

        if project_id:
            query = query.filter(Alert.project_id == project_id)

        if unit_id:
            query = query.filter(Alert.unit_id == unit_id)

        if alert_type:
            query = query.filter(Alert.alert_type == alert_type)

        if min_severity:
            # Map severity to numeric for comparison
            severity_levels = {"info": 1, "warning": 2, "critical": 3}
            min_level = severity_levels.get(min_severity, 1)

            if min_level == 2:
                query = query.filter(Alert.severity.in_(["warning", "critical"]))
            elif min_level == 3:
                query = query.filter(Alert.severity == "critical")

        return query.order_by(Alert.created_at.desc()).limit(limit).all()

    def get_all_alerts(
        self,
        status: str = None,
        project_id: str = None,
        unit_id: str = None,
        alert_type: str = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Alert]:
        """
        Query all alerts with optional filters (includes non-active).

        Args:
            status: Filter by status (active, acknowledged, resolved, dismissed)
            project_id: Filter by project
            unit_id: Filter by unit
            alert_type: Filter by alert type
            limit: Maximum results
            offset: Pagination offset

        Returns:
            List of matching alerts
        """
        query = self.db.query(Alert)

        if status:
            query = query.filter(Alert.status == status)

        if project_id:
            query = query.filter(Alert.project_id == project_id)

        if unit_id:
            query = query.filter(Alert.unit_id == unit_id)

        if alert_type:
            query = query.filter(Alert.alert_type == alert_type)

        return (
            query.order_by(Alert.created_at.desc())
            .offset(offset)
            .limit(limit)
            .all()
        )

    def get_active_alert_count(self) -> int:
        """Get count of active alerts for badge display."""
        return self.db.query(Alert).filter(Alert.status == "active").count()

    def acknowledge_alert(self, alert_id: str) -> Optional[Alert]:
        """
        Mark alert as acknowledged.

        Args:
            alert_id: Alert to acknowledge

        Returns:
            Updated Alert or None if not found
        """
        alert = self.db.query(Alert).filter_by(id=alert_id).first()
        if not alert:
            return None

        alert.status = "acknowledged"
        alert.acknowledged_at = datetime.utcnow()
        self.db.commit()

        logger.info(f"Acknowledged alert: {alert.title}")
        return alert

    def dismiss_alert(self, alert_id: str) -> Optional[Alert]:
        """
        Dismiss alert (user chose to ignore).

        Args:
            alert_id: Alert to dismiss

        Returns:
            Updated Alert or None if not found
        """
        alert = self.db.query(Alert).filter_by(id=alert_id).first()
        if not alert:
            return None

        alert.status = "dismissed"
        self.db.commit()

        logger.info(f"Dismissed alert: {alert.title}")
        return alert

    def resolve_alert(self, alert_id: str) -> Optional[Alert]:
        """
        Manually resolve an alert.

        Args:
            alert_id: Alert to resolve

        Returns:
            Updated Alert or None if not found
        """
        alert = self.db.query(Alert).filter_by(id=alert_id).first()
        if not alert:
            return None

        alert.status = "resolved"
        alert.resolved_at = datetime.utcnow()
        self.db.commit()

        logger.info(f"Resolved alert: {alert.title}")
        return alert

    def cleanup_expired_alerts(self) -> int:
        """
        Remove alerts past their expiration time.

        Returns:
            Number of alerts cleaned up
        """
        now = datetime.utcnow()
        expired = self.db.query(Alert).filter(
            and_(
                Alert.expires_at.isnot(None),
                Alert.expires_at < now,
                Alert.status == "active",
            )
        ).all()

        count = len(expired)
        for alert in expired:
            alert.status = "dismissed"

        if count > 0:
            self.db.commit()
            logger.info(f"Cleaned up {count} expired alerts")

        return count


def get_alert_service(db: Session) -> AlertService:
    """Get an AlertService instance with the given database session."""
    return AlertService(db)
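A minimal sketch of how a caller might drive this service, assuming the SessionLocal factory from backend.database; the unit ID and error string are placeholders:

from backend.database import SessionLocal
from backend.services.alert_service import get_alert_service

db = SessionLocal()
try:
    service = get_alert_service(db)

    # Raise an offline alert after repeated poll failures (deduplicated internally)
    service.create_device_offline_alert(
        unit_id="NRL-001",               # hypothetical unit ID
        consecutive_failures=5,          # >= 5 escalates severity to "critical"
        last_error="connection timed out",
    )

    # Later, when the device responds again, resolve it and emit an info alert
    service.resolve_device_offline_alert("NRL-001")
finally:
    db.close()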
@@ -333,6 +333,76 @@ class DeviceController:
         else:
             raise UnsupportedDeviceTypeError(f"Unsupported device type: {device_type}")
 
+    # ========================================================================
+    # Store/Index Management
+    # ========================================================================
+
+    async def increment_index(
+        self,
+        unit_id: str,
+        device_type: str,
+    ) -> Dict[str, Any]:
+        """
+        Increment the store/index number on a device.
+
+        For SLMs, this increments the store name to prevent "overwrite data?" prompts.
+        Should be called before starting a new measurement if auto_increment_index is enabled.
+
+        Args:
+            unit_id: Unit identifier
+            device_type: "slm" | "seismograph"
+
+        Returns:
+            Response dict with old_index and new_index
+        """
+        if device_type == "slm":
+            try:
+                return await self.slmm_client.increment_index(unit_id)
+            except SLMMClientError as e:
+                raise DeviceControllerError(f"SLMM error: {str(e)}")
+
+        elif device_type == "seismograph":
+            # Seismographs may not have the same concept of store index
+            return {
+                "status": "not_applicable",
+                "message": "Index increment not applicable for seismographs",
+                "unit_id": unit_id,
+            }
+
+        else:
+            raise UnsupportedDeviceTypeError(f"Unsupported device type: {device_type}")
+
+    async def get_index_number(
+        self,
+        unit_id: str,
+        device_type: str,
+    ) -> Dict[str, Any]:
+        """
+        Get current store/index number from device.
+
+        Args:
+            unit_id: Unit identifier
+            device_type: "slm" | "seismograph"
+
+        Returns:
+            Response dict with current index_number
+        """
+        if device_type == "slm":
+            try:
+                return await self.slmm_client.get_index_number(unit_id)
+            except SLMMClientError as e:
+                raise DeviceControllerError(f"SLMM error: {str(e)}")
+
+        elif device_type == "seismograph":
+            return {
+                "status": "not_applicable",
+                "message": "Index number not applicable for seismographs",
+                "unit_id": unit_id,
+            }
+
+        else:
+            raise UnsupportedDeviceTypeError(f"Unsupported device type: {device_type}")
+
     # ========================================================================
     # Health Check
     # ========================================================================
184
backend/services/device_status_monitor.py
Normal file
184
backend/services/device_status_monitor.py
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
"""
|
||||||
|
Device Status Monitor
|
||||||
|
|
||||||
|
Background task that monitors device reachability via SLMM polling status
|
||||||
|
and triggers alerts when devices go offline or come back online.
|
||||||
|
|
||||||
|
This service bridges SLMM's device polling with Terra-View's alert system.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Dict
|
||||||
|
|
||||||
|
from backend.database import SessionLocal
|
||||||
|
from backend.services.slmm_client import get_slmm_client, SLMMClientError
|
||||||
|
from backend.services.alert_service import get_alert_service
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceStatusMonitor:
|
||||||
|
"""
|
||||||
|
Monitors device reachability via SLMM's polling status endpoint.
|
||||||
|
|
||||||
|
Detects state transitions (online→offline, offline→online) and
|
||||||
|
triggers AlertService to create/resolve alerts.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
monitor = DeviceStatusMonitor()
|
||||||
|
await monitor.start() # Start background monitoring
|
||||||
|
monitor.stop() # Stop monitoring
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, check_interval: int = 60):
|
||||||
|
"""
|
||||||
|
Initialize the monitor.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
check_interval: Seconds between status checks (default: 60)
|
||||||
|
"""
|
||||||
|
self.check_interval = check_interval
|
||||||
|
self.running = False
|
||||||
|
self.task: Optional[asyncio.Task] = None
|
||||||
|
self.slmm_client = get_slmm_client()
|
||||||
|
|
||||||
|
# Track previous device states to detect transitions
|
||||||
|
self._device_states: Dict[str, bool] = {}
|
||||||
|
|
||||||
|
async def start(self):
|
||||||
|
"""Start the monitoring background task."""
|
||||||
|
if self.running:
|
||||||
|
logger.warning("DeviceStatusMonitor is already running")
|
||||||
|
return
|
||||||
|
|
||||||
|
self.running = True
|
||||||
|
self.task = asyncio.create_task(self._monitor_loop())
|
||||||
|
logger.info(f"DeviceStatusMonitor started (checking every {self.check_interval}s)")
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
"""Stop the monitoring background task."""
|
||||||
|
self.running = False
|
||||||
|
if self.task:
|
||||||
|
self.task.cancel()
|
||||||
|
logger.info("DeviceStatusMonitor stopped")
|
||||||
|
|
||||||
|
async def _monitor_loop(self):
|
||||||
|
"""Main monitoring loop."""
|
||||||
|
while self.running:
|
||||||
|
try:
|
||||||
|
await self._check_all_devices()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in device status monitor: {e}", exc_info=True)
|
||||||
|
|
||||||
|
# Sleep in small intervals for graceful shutdown
|
||||||
|
for _ in range(self.check_interval):
|
||||||
|
if not self.running:
|
||||||
|
break
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
logger.info("DeviceStatusMonitor loop exited")
|
||||||
|
|
||||||
|
async def _check_all_devices(self):
|
||||||
|
"""
|
||||||
|
Fetch polling status from SLMM and detect state transitions.
|
||||||
|
|
||||||
|
Uses GET /api/slmm/_polling/status (proxied to SLMM)
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Get status from SLMM
|
||||||
|
status_response = await self.slmm_client.get_polling_status()
|
||||||
|
devices = status_response.get("devices", [])
|
||||||
|
|
||||||
|
if not devices:
|
||||||
|
logger.debug("No devices in polling status response")
|
||||||
|
return
|
||||||
|
|
||||||
|
db = SessionLocal()
|
||||||
|
try:
|
||||||
|
alert_service = get_alert_service(db)
|
||||||
|
|
||||||
|
for device in devices:
|
||||||
|
unit_id = device.get("unit_id")
|
||||||
|
if not unit_id:
|
||||||
|
continue
|
||||||
|
|
||||||
|
is_reachable = device.get("is_reachable", True)
|
||||||
|
previous_reachable = self._device_states.get(unit_id)
|
||||||
|
|
||||||
|
# Skip if this is the first check (no previous state)
|
||||||
|
if previous_reachable is None:
|
||||||
|
self._device_states[unit_id] = is_reachable
|
||||||
|
logger.debug(f"Initial state for {unit_id}: reachable={is_reachable}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Detect offline transition (was online, now offline)
|
||||||
|
if previous_reachable and not is_reachable:
|
||||||
|
logger.warning(f"Device {unit_id} went OFFLINE")
|
||||||
|
alert_service.create_device_offline_alert(
|
||||||
|
unit_id=unit_id,
|
||||||
|
consecutive_failures=device.get("consecutive_failures", 0),
|
||||||
|
last_error=device.get("last_error"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Detect online transition (was offline, now online)
|
||||||
|
elif not previous_reachable and is_reachable:
|
||||||
|
logger.info(f"Device {unit_id} came back ONLINE")
|
||||||
|
alert_service.resolve_device_offline_alert(unit_id)
|
||||||
|
|
||||||
|
# Update tracked state
|
||||||
|
self._device_states[unit_id] = is_reachable
|
||||||
|
|
||||||
|
# Cleanup expired alerts while we're here
|
||||||
|
alert_service.cleanup_expired_alerts()
|
||||||
|
|
||||||
|
finally:
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
except SLMMClientError as e:
|
||||||
|
logger.warning(f"Could not reach SLMM for status check: {e}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error checking device status: {e}", exc_info=True)
|
||||||
|
|
||||||
|
def get_tracked_devices(self) -> Dict[str, bool]:
|
||||||
|
"""
|
||||||
|
Get the current tracked device states.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict mapping unit_id to is_reachable status
|
||||||
|
"""
|
||||||
|
return dict(self._device_states)
|
||||||
|
|
||||||
|
def clear_tracked_devices(self):
|
||||||
|
"""Clear all tracked device states (useful for testing)."""
|
||||||
|
self._device_states.clear()
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton instance
|
||||||
|
_monitor_instance: Optional[DeviceStatusMonitor] = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_device_status_monitor() -> DeviceStatusMonitor:
|
||||||
|
"""
|
||||||
|
Get the device status monitor singleton instance.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
DeviceStatusMonitor instance
|
||||||
|
"""
|
||||||
|
global _monitor_instance
|
||||||
|
if _monitor_instance is None:
|
||||||
|
_monitor_instance = DeviceStatusMonitor()
|
||||||
|
return _monitor_instance
|
||||||
|
|
||||||
|
|
||||||
|
async def start_device_status_monitor():
|
||||||
|
"""Start the global device status monitor."""
|
||||||
|
monitor = get_device_status_monitor()
|
||||||
|
await monitor.start()
|
||||||
|
|
||||||
|
|
||||||
|
def stop_device_status_monitor():
|
||||||
|
"""Stop the global device status monitor."""
|
||||||
|
monitor = get_device_status_monitor()
|
||||||
|
monitor.stop()
|
||||||
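
For illustration, this is roughly the polling-status payload shape the monitor consumes. The field names are the ones read in _check_all_devices() and listed in SLMMClient.get_polling_status; the values (unit id, timestamps, error text) are made-up placeholders, not real device data.

# Illustrative payload only; field names match what _check_all_devices() reads.
status_response = {
    "devices": [
        {
            "unit_id": "NL43-0001",          # hypothetical unit id
            "is_reachable": False,
            "consecutive_failures": 3,
            "last_poll_attempt": "2025-01-01T12:00:00Z",
            "last_success": "2025-01-01T11:55:00Z",
            "last_error": "timeout",
        },
    ],
}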
550
backend/services/recurring_schedule_service.py
Normal file
@@ -0,0 +1,550 @@
"""
|
||||||
|
Recurring Schedule Service
|
||||||
|
|
||||||
|
Manages recurring schedule definitions and generates ScheduledAction
|
||||||
|
instances based on patterns (weekly calendar, simple interval).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
import logging
|
||||||
|
from datetime import datetime, timedelta, date, time
|
||||||
|
from typing import Optional, List, Dict, Any, Tuple
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from sqlalchemy import and_
|
||||||
|
|
||||||
|
from backend.models import RecurringSchedule, ScheduledAction, MonitoringLocation, UnitAssignment
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Day name mapping
|
||||||
|
DAY_NAMES = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"]
|
||||||
|
|
||||||
|
|
||||||
|
class RecurringScheduleService:
|
||||||
|
"""
|
||||||
|
Service for managing recurring schedules and generating ScheduledActions.
|
||||||
|
|
||||||
|
Supports two schedule types:
|
||||||
|
- weekly_calendar: Specific days with start/end times
|
||||||
|
- simple_interval: Daily stop/download/restart cycles for 24/7 monitoring
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, db: Session):
|
||||||
|
self.db = db
|
||||||
|
|
||||||
|
def create_schedule(
|
||||||
|
self,
|
||||||
|
project_id: str,
|
||||||
|
location_id: str,
|
||||||
|
name: str,
|
||||||
|
schedule_type: str,
|
||||||
|
device_type: str = "slm",
|
||||||
|
unit_id: str = None,
|
||||||
|
weekly_pattern: dict = None,
|
||||||
|
interval_type: str = None,
|
||||||
|
cycle_time: str = None,
|
||||||
|
include_download: bool = True,
|
||||||
|
auto_increment_index: bool = True,
|
||||||
|
timezone: str = "America/New_York",
|
||||||
|
) -> RecurringSchedule:
|
||||||
|
"""
|
||||||
|
Create a new recurring schedule.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_id: Project ID
|
||||||
|
location_id: Monitoring location ID
|
||||||
|
name: Schedule name
|
||||||
|
schedule_type: "weekly_calendar" or "simple_interval"
|
||||||
|
device_type: "slm" or "seismograph"
|
||||||
|
unit_id: Specific unit (optional, can use assignment)
|
||||||
|
weekly_pattern: Dict of day patterns for weekly_calendar
|
||||||
|
interval_type: "daily" or "hourly" for simple_interval
|
||||||
|
cycle_time: Time string "HH:MM" for cycle
|
||||||
|
include_download: Whether to download data on cycle
|
||||||
|
auto_increment_index: Whether to auto-increment store index before start
|
||||||
|
timezone: Timezone for schedule times
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Created RecurringSchedule
|
||||||
|
"""
|
||||||
|
schedule = RecurringSchedule(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=project_id,
|
||||||
|
location_id=location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
name=name,
|
||||||
|
schedule_type=schedule_type,
|
||||||
|
device_type=device_type,
|
||||||
|
weekly_pattern=json.dumps(weekly_pattern) if weekly_pattern else None,
|
||||||
|
interval_type=interval_type,
|
||||||
|
cycle_time=cycle_time,
|
||||||
|
include_download=include_download,
|
||||||
|
auto_increment_index=auto_increment_index,
|
||||||
|
enabled=True,
|
||||||
|
timezone=timezone,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Calculate next occurrence
|
||||||
|
schedule.next_occurrence = self._calculate_next_occurrence(schedule)
|
||||||
|
|
||||||
|
self.db.add(schedule)
|
||||||
|
self.db.commit()
|
||||||
|
self.db.refresh(schedule)
|
||||||
|
|
||||||
|
logger.info(f"Created recurring schedule: {name} ({schedule_type})")
|
||||||
|
return schedule
|
||||||
|
|
||||||
|
def update_schedule(
|
||||||
|
self,
|
||||||
|
schedule_id: str,
|
||||||
|
**kwargs,
|
||||||
|
) -> Optional[RecurringSchedule]:
|
||||||
|
"""
|
||||||
|
Update a recurring schedule.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
schedule_id: Schedule to update
|
||||||
|
**kwargs: Fields to update
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Updated schedule or None
|
||||||
|
"""
|
||||||
|
schedule = self.db.query(RecurringSchedule).filter_by(id=schedule_id).first()
|
||||||
|
if not schedule:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for key, value in kwargs.items():
|
||||||
|
if hasattr(schedule, key):
|
||||||
|
if key == "weekly_pattern" and isinstance(value, dict):
|
||||||
|
value = json.dumps(value)
|
||||||
|
setattr(schedule, key, value)
|
||||||
|
|
||||||
|
# Recalculate next occurrence
|
||||||
|
schedule.next_occurrence = self._calculate_next_occurrence(schedule)
|
||||||
|
|
||||||
|
self.db.commit()
|
||||||
|
self.db.refresh(schedule)
|
||||||
|
|
||||||
|
logger.info(f"Updated recurring schedule: {schedule.name}")
|
||||||
|
return schedule
|
||||||
|
|
||||||
|
def delete_schedule(self, schedule_id: str) -> bool:
|
||||||
|
"""
|
||||||
|
Delete a recurring schedule and its pending generated actions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
schedule_id: Schedule to delete
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if deleted, False if not found
|
||||||
|
"""
|
||||||
|
schedule = self.db.query(RecurringSchedule).filter_by(id=schedule_id).first()
|
||||||
|
if not schedule:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Delete pending generated actions for this schedule
|
||||||
|
# Note: We don't have recurring_schedule_id field yet, so we can't clean up
|
||||||
|
# generated actions. This is fine for now.
|
||||||
|
|
||||||
|
self.db.delete(schedule)
|
||||||
|
self.db.commit()
|
||||||
|
|
||||||
|
logger.info(f"Deleted recurring schedule: {schedule.name}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def enable_schedule(self, schedule_id: str) -> Optional[RecurringSchedule]:
|
||||||
|
"""Enable a disabled schedule."""
|
||||||
|
return self.update_schedule(schedule_id, enabled=True)
|
||||||
|
|
||||||
|
def disable_schedule(self, schedule_id: str) -> Optional[RecurringSchedule]:
|
||||||
|
"""Disable a schedule."""
|
||||||
|
return self.update_schedule(schedule_id, enabled=False)
|
||||||
|
|
||||||
|
def generate_actions_for_schedule(
|
||||||
|
self,
|
||||||
|
schedule: RecurringSchedule,
|
||||||
|
horizon_days: int = 7,
|
||||||
|
preview_only: bool = False,
|
||||||
|
) -> List[ScheduledAction]:
|
||||||
|
"""
|
||||||
|
Generate ScheduledAction entries for the next N days based on pattern.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
schedule: The recurring schedule
|
||||||
|
horizon_days: Days ahead to generate
|
||||||
|
preview_only: If True, don't save to DB (for preview)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of generated ScheduledAction instances
|
||||||
|
"""
|
||||||
|
if not schedule.enabled:
|
||||||
|
return []
|
||||||
|
|
||||||
|
if schedule.schedule_type == "weekly_calendar":
|
||||||
|
actions = self._generate_weekly_calendar_actions(schedule, horizon_days)
|
||||||
|
elif schedule.schedule_type == "simple_interval":
|
||||||
|
actions = self._generate_interval_actions(schedule, horizon_days)
|
||||||
|
else:
|
||||||
|
logger.warning(f"Unknown schedule type: {schedule.schedule_type}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
if not preview_only and actions:
|
||||||
|
for action in actions:
|
||||||
|
self.db.add(action)
|
||||||
|
|
||||||
|
schedule.last_generated_at = datetime.utcnow()
|
||||||
|
schedule.next_occurrence = self._calculate_next_occurrence(schedule)
|
||||||
|
|
||||||
|
self.db.commit()
|
||||||
|
logger.info(f"Generated {len(actions)} actions for schedule: {schedule.name}")
|
||||||
|
|
||||||
|
return actions
|
||||||
|
|
||||||
|
def _generate_weekly_calendar_actions(
|
||||||
|
self,
|
||||||
|
schedule: RecurringSchedule,
|
||||||
|
horizon_days: int,
|
||||||
|
) -> List[ScheduledAction]:
|
||||||
|
"""
|
||||||
|
Generate actions from weekly calendar pattern.
|
||||||
|
|
||||||
|
Pattern format:
|
||||||
|
{
|
||||||
|
"monday": {"enabled": true, "start": "19:00", "end": "07:00"},
|
||||||
|
"tuesday": {"enabled": false},
|
||||||
|
...
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
if not schedule.weekly_pattern:
|
||||||
|
return []
|
||||||
|
|
||||||
|
try:
|
||||||
|
pattern = json.loads(schedule.weekly_pattern)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
logger.error(f"Invalid weekly_pattern JSON for schedule {schedule.id}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
actions = []
|
||||||
|
tz = ZoneInfo(schedule.timezone)
|
||||||
|
now_utc = datetime.utcnow()
|
||||||
|
now_local = now_utc.replace(tzinfo=ZoneInfo("UTC")).astimezone(tz)
|
||||||
|
|
||||||
|
# Get unit_id (from schedule or assignment)
|
||||||
|
unit_id = self._resolve_unit_id(schedule)
|
||||||
|
|
||||||
|
for day_offset in range(horizon_days):
|
||||||
|
check_date = now_local.date() + timedelta(days=day_offset)
|
||||||
|
day_name = DAY_NAMES[check_date.weekday()]
|
||||||
|
day_config = pattern.get(day_name, {})
|
||||||
|
|
||||||
|
if not day_config.get("enabled", False):
|
||||||
|
continue
|
||||||
|
|
||||||
|
start_time_str = day_config.get("start")
|
||||||
|
end_time_str = day_config.get("end")
|
||||||
|
|
||||||
|
if not start_time_str or not end_time_str:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Parse times
|
||||||
|
start_time = self._parse_time(start_time_str)
|
||||||
|
end_time = self._parse_time(end_time_str)
|
||||||
|
|
||||||
|
if not start_time or not end_time:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Create start datetime in local timezone
|
||||||
|
start_local = datetime.combine(check_date, start_time, tzinfo=tz)
|
||||||
|
start_utc = start_local.astimezone(ZoneInfo("UTC")).replace(tzinfo=None)
|
||||||
|
|
||||||
|
# Handle overnight schedules (end time is next day)
|
||||||
|
if end_time <= start_time:
|
||||||
|
end_date = check_date + timedelta(days=1)
|
||||||
|
else:
|
||||||
|
end_date = check_date
|
||||||
|
|
||||||
|
end_local = datetime.combine(end_date, end_time, tzinfo=tz)
|
||||||
|
end_utc = end_local.astimezone(ZoneInfo("UTC")).replace(tzinfo=None)
|
||||||
|
|
||||||
|
# Skip if start time has already passed
|
||||||
|
if start_utc <= now_utc:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Check if action already exists
|
||||||
|
if self._action_exists(schedule.project_id, schedule.location_id, "start", start_utc):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Build notes with automation metadata
|
||||||
|
start_notes = json.dumps({
|
||||||
|
"schedule_name": schedule.name,
|
||||||
|
"schedule_id": schedule.id,
|
||||||
|
"auto_increment_index": schedule.auto_increment_index,
|
||||||
|
})
|
||||||
|
|
||||||
|
# Create START action
|
||||||
|
start_action = ScheduledAction(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=schedule.project_id,
|
||||||
|
location_id=schedule.location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
action_type="start",
|
||||||
|
device_type=schedule.device_type,
|
||||||
|
scheduled_time=start_utc,
|
||||||
|
execution_status="pending",
|
||||||
|
notes=start_notes,
|
||||||
|
)
|
||||||
|
actions.append(start_action)
|
||||||
|
|
||||||
|
# Create STOP action
|
||||||
|
stop_notes = json.dumps({
|
||||||
|
"schedule_name": schedule.name,
|
||||||
|
"schedule_id": schedule.id,
|
||||||
|
})
|
||||||
|
stop_action = ScheduledAction(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=schedule.project_id,
|
||||||
|
location_id=schedule.location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
action_type="stop",
|
||||||
|
device_type=schedule.device_type,
|
||||||
|
scheduled_time=end_utc,
|
||||||
|
execution_status="pending",
|
||||||
|
notes=stop_notes,
|
||||||
|
)
|
||||||
|
actions.append(stop_action)
|
||||||
|
|
||||||
|
# Create DOWNLOAD action if enabled (1 minute after stop)
|
||||||
|
if schedule.include_download:
|
||||||
|
download_time = end_utc + timedelta(minutes=1)
|
||||||
|
download_notes = json.dumps({
|
||||||
|
"schedule_name": schedule.name,
|
||||||
|
"schedule_id": schedule.id,
|
||||||
|
"schedule_type": "weekly_calendar",
|
||||||
|
})
|
||||||
|
download_action = ScheduledAction(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=schedule.project_id,
|
||||||
|
location_id=schedule.location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
action_type="download",
|
||||||
|
device_type=schedule.device_type,
|
||||||
|
scheduled_time=download_time,
|
||||||
|
execution_status="pending",
|
||||||
|
notes=download_notes,
|
||||||
|
)
|
||||||
|
actions.append(download_action)
|
||||||
|
|
||||||
|
return actions
|
||||||
|
|
||||||
|
def _generate_interval_actions(
|
||||||
|
self,
|
||||||
|
schedule: RecurringSchedule,
|
||||||
|
horizon_days: int,
|
||||||
|
) -> List[ScheduledAction]:
|
||||||
|
"""
|
||||||
|
Generate actions from simple interval pattern.
|
||||||
|
|
||||||
|
For daily cycles: stop, download (optional), start at cycle_time each day.
|
||||||
|
"""
|
||||||
|
if not schedule.cycle_time:
|
||||||
|
return []
|
||||||
|
|
||||||
|
cycle_time = self._parse_time(schedule.cycle_time)
|
||||||
|
if not cycle_time:
|
||||||
|
return []
|
||||||
|
|
||||||
|
actions = []
|
||||||
|
tz = ZoneInfo(schedule.timezone)
|
||||||
|
now_utc = datetime.utcnow()
|
||||||
|
now_local = now_utc.replace(tzinfo=ZoneInfo("UTC")).astimezone(tz)
|
||||||
|
|
||||||
|
# Get unit_id
|
||||||
|
unit_id = self._resolve_unit_id(schedule)
|
||||||
|
|
||||||
|
for day_offset in range(horizon_days):
|
||||||
|
check_date = now_local.date() + timedelta(days=day_offset)
|
||||||
|
|
||||||
|
# Create cycle datetime in local timezone
|
||||||
|
cycle_local = datetime.combine(check_date, cycle_time, tzinfo=tz)
|
||||||
|
cycle_utc = cycle_local.astimezone(ZoneInfo("UTC")).replace(tzinfo=None)
|
||||||
|
|
||||||
|
# Skip if time has passed
|
||||||
|
if cycle_utc <= now_utc:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Check if action already exists
|
||||||
|
if self._action_exists(schedule.project_id, schedule.location_id, "stop", cycle_utc):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Build notes with metadata
|
||||||
|
stop_notes = json.dumps({
|
||||||
|
"schedule_name": schedule.name,
|
||||||
|
"schedule_id": schedule.id,
|
||||||
|
"cycle_type": "daily",
|
||||||
|
})
|
||||||
|
|
||||||
|
# Create STOP action
|
||||||
|
stop_action = ScheduledAction(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=schedule.project_id,
|
||||||
|
location_id=schedule.location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
action_type="stop",
|
||||||
|
device_type=schedule.device_type,
|
||||||
|
scheduled_time=cycle_utc,
|
||||||
|
execution_status="pending",
|
||||||
|
notes=stop_notes,
|
||||||
|
)
|
||||||
|
actions.append(stop_action)
|
||||||
|
|
||||||
|
# Create DOWNLOAD action if enabled (1 minute after stop)
|
||||||
|
if schedule.include_download:
|
||||||
|
download_time = cycle_utc + timedelta(minutes=1)
|
||||||
|
download_notes = json.dumps({
|
||||||
|
"schedule_name": schedule.name,
|
||||||
|
"schedule_id": schedule.id,
|
||||||
|
"cycle_type": "daily",
|
||||||
|
})
|
||||||
|
download_action = ScheduledAction(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=schedule.project_id,
|
||||||
|
location_id=schedule.location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
action_type="download",
|
||||||
|
device_type=schedule.device_type,
|
||||||
|
scheduled_time=download_time,
|
||||||
|
execution_status="pending",
|
||||||
|
notes=download_notes,
|
||||||
|
)
|
||||||
|
actions.append(download_action)
|
||||||
|
|
||||||
|
# Create START action (2 minutes after stop, or 1 minute after download)
|
||||||
|
start_offset = 2 if schedule.include_download else 1
|
||||||
|
start_time = cycle_utc + timedelta(minutes=start_offset)
|
||||||
|
start_notes = json.dumps({
|
||||||
|
"schedule_name": schedule.name,
|
||||||
|
"schedule_id": schedule.id,
|
||||||
|
"cycle_type": "daily",
|
||||||
|
"auto_increment_index": schedule.auto_increment_index,
|
||||||
|
})
|
||||||
|
start_action = ScheduledAction(
|
||||||
|
id=str(uuid.uuid4()),
|
||||||
|
project_id=schedule.project_id,
|
||||||
|
location_id=schedule.location_id,
|
||||||
|
unit_id=unit_id,
|
||||||
|
action_type="start",
|
||||||
|
device_type=schedule.device_type,
|
||||||
|
scheduled_time=start_time,
|
||||||
|
execution_status="pending",
|
||||||
|
notes=start_notes,
|
||||||
|
)
|
||||||
|
actions.append(start_action)
|
||||||
|
|
||||||
|
return actions
|
||||||
|
|
||||||
|
def _calculate_next_occurrence(self, schedule: RecurringSchedule) -> Optional[datetime]:
|
||||||
|
"""Calculate when the next action should occur."""
|
||||||
|
if not schedule.enabled:
|
||||||
|
return None
|
||||||
|
|
||||||
|
tz = ZoneInfo(schedule.timezone)
|
||||||
|
now_utc = datetime.utcnow()
|
||||||
|
now_local = now_utc.replace(tzinfo=ZoneInfo("UTC")).astimezone(tz)
|
||||||
|
|
||||||
|
if schedule.schedule_type == "weekly_calendar" and schedule.weekly_pattern:
|
||||||
|
try:
|
||||||
|
pattern = json.loads(schedule.weekly_pattern)
|
||||||
|
except:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Find next enabled day
|
||||||
|
for day_offset in range(8): # Check up to a week ahead
|
||||||
|
check_date = now_local.date() + timedelta(days=day_offset)
|
||||||
|
day_name = DAY_NAMES[check_date.weekday()]
|
||||||
|
day_config = pattern.get(day_name, {})
|
||||||
|
|
||||||
|
if day_config.get("enabled") and day_config.get("start"):
|
||||||
|
start_time = self._parse_time(day_config["start"])
|
||||||
|
if start_time:
|
||||||
|
start_local = datetime.combine(check_date, start_time, tzinfo=tz)
|
||||||
|
start_utc = start_local.astimezone(ZoneInfo("UTC")).replace(tzinfo=None)
|
||||||
|
if start_utc > now_utc:
|
||||||
|
return start_utc
|
||||||
|
|
||||||
|
elif schedule.schedule_type == "simple_interval" and schedule.cycle_time:
|
||||||
|
cycle_time = self._parse_time(schedule.cycle_time)
|
||||||
|
if cycle_time:
|
||||||
|
# Find next cycle time
|
||||||
|
for day_offset in range(2):
|
||||||
|
check_date = now_local.date() + timedelta(days=day_offset)
|
||||||
|
cycle_local = datetime.combine(check_date, cycle_time, tzinfo=tz)
|
||||||
|
cycle_utc = cycle_local.astimezone(ZoneInfo("UTC")).replace(tzinfo=None)
|
||||||
|
if cycle_utc > now_utc:
|
||||||
|
return cycle_utc
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _resolve_unit_id(self, schedule: RecurringSchedule) -> Optional[str]:
|
||||||
|
"""Get unit_id from schedule or active assignment."""
|
||||||
|
if schedule.unit_id:
|
||||||
|
return schedule.unit_id
|
||||||
|
|
||||||
|
# Try to get from active assignment
|
||||||
|
assignment = self.db.query(UnitAssignment).filter(
|
||||||
|
and_(
|
||||||
|
UnitAssignment.location_id == schedule.location_id,
|
||||||
|
UnitAssignment.status == "active",
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
|
||||||
|
return assignment.unit_id if assignment else None
|
||||||
|
|
||||||
|
def _action_exists(
|
||||||
|
self,
|
||||||
|
project_id: str,
|
||||||
|
location_id: str,
|
||||||
|
action_type: str,
|
||||||
|
scheduled_time: datetime,
|
||||||
|
) -> bool:
|
||||||
|
"""Check if an action already exists for this time slot."""
|
||||||
|
# Allow 5-minute window for duplicate detection
|
||||||
|
time_window_start = scheduled_time - timedelta(minutes=5)
|
||||||
|
time_window_end = scheduled_time + timedelta(minutes=5)
|
||||||
|
|
||||||
|
exists = self.db.query(ScheduledAction).filter(
|
||||||
|
and_(
|
||||||
|
ScheduledAction.project_id == project_id,
|
||||||
|
ScheduledAction.location_id == location_id,
|
||||||
|
ScheduledAction.action_type == action_type,
|
||||||
|
ScheduledAction.scheduled_time >= time_window_start,
|
||||||
|
ScheduledAction.scheduled_time <= time_window_end,
|
||||||
|
ScheduledAction.execution_status == "pending",
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
|
||||||
|
return exists is not None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _parse_time(time_str: str) -> Optional[time]:
|
||||||
|
"""Parse time string "HH:MM" to time object."""
|
||||||
|
try:
|
||||||
|
parts = time_str.split(":")
|
||||||
|
return time(int(parts[0]), int(parts[1]))
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_schedules_for_project(self, project_id: str) -> List[RecurringSchedule]:
|
||||||
|
"""Get all recurring schedules for a project."""
|
||||||
|
return self.db.query(RecurringSchedule).filter_by(project_id=project_id).all()
|
||||||
|
|
||||||
|
def get_enabled_schedules(self) -> List[RecurringSchedule]:
|
||||||
|
"""Get all enabled recurring schedules."""
|
||||||
|
return self.db.query(RecurringSchedule).filter_by(enabled=True).all()
|
||||||
|
|
||||||
|
|
||||||
|
def get_recurring_schedule_service(db: Session) -> RecurringScheduleService:
|
||||||
|
"""Get a RecurringScheduleService instance."""
|
||||||
|
return RecurringScheduleService(db)
|
||||||
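
For illustration, a minimal sketch of driving the new service from a shell or a request handler, assuming a SQLAlchemy session from SessionLocal(). The project/location IDs and the schedule name are placeholders; the call signatures match create_schedule and generate_actions_for_schedule above.

from backend.database import SessionLocal
from backend.services.recurring_schedule_service import get_recurring_schedule_service

db = SessionLocal()
service = get_recurring_schedule_service(db)

# Weekly pattern: record Monday 19:00 through Tuesday 07:00 (overnight), skip Tuesday.
schedule = service.create_schedule(
    project_id="example-project-id",      # placeholder
    location_id="example-location-id",    # placeholder
    name="Night-time noise monitoring",
    schedule_type="weekly_calendar",
    weekly_pattern={
        "monday": {"enabled": True, "start": "19:00", "end": "07:00"},
        "tuesday": {"enabled": False},
    },
)

# Preview the next 7 days of generated actions without persisting them.
preview = service.generate_actions_for_schedule(schedule, horizon_days=7, preview_only=True)
print([(a.action_type, a.scheduled_time) for a in preview])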
@@ -4,22 +4,29 @@ Scheduler Service
Executes scheduled actions for Projects system.
Monitors pending scheduled actions and executes them by calling device modules (SLMM/SFM).

Extended to support recurring schedules:
- Generates ScheduledActions from RecurringSchedule patterns
- Cleans up old completed/failed actions

This service runs as a background task in FastAPI, checking for pending actions
every minute and executing them when their scheduled time arrives.
"""

import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy import and_

from backend.database import SessionLocal
from backend.models import ScheduledAction, RecordingSession, MonitoringLocation, Project
from backend.models import ScheduledAction, RecordingSession, MonitoringLocation, Project, RecurringSchedule
from backend.services.device_controller import get_device_controller, DeviceControllerError
import uuid

logger = logging.getLogger(__name__)


class SchedulerService:
    """
@@ -62,11 +69,26 @@ class SchedulerService:

    async def _run_loop(self):
        """Main scheduler loop."""
        # Track when we last generated recurring actions (do this once per hour)
        last_generation_check = datetime.utcnow() - timedelta(hours=1)

        while self.running:
            try:
                # Execute pending actions
                await self.execute_pending_actions()

                # Generate actions from recurring schedules (every hour)
                now = datetime.utcnow()
                if (now - last_generation_check).total_seconds() >= 3600:
                    await self.generate_recurring_actions()
                    last_generation_check = now

                # Cleanup old actions (also every hour, during generation cycle)
                if (now - last_generation_check).total_seconds() < 60:
                    await self.cleanup_old_actions()

            except Exception as e:
                print(f"Scheduler error: {e}")
                logger.error(f"Scheduler error: {e}", exc_info=True)
                # Continue running even if there's an error

            await asyncio.sleep(self.check_interval)
@@ -194,11 +216,34 @@ class SchedulerService:
        db: Session,
    ) -> Dict[str, Any]:
        """Execute a 'start' action."""
        # Parse action notes for automation settings
        auto_increment_index = False
        try:
            if action.notes:
                notes_data = json.loads(action.notes)
                auto_increment_index = notes_data.get("auto_increment_index", False)
        except json.JSONDecodeError:
            pass  # Notes is plain text, not JSON

        # If auto_increment_index is enabled, increment the store index before starting
        increment_response = None
        if auto_increment_index and action.device_type == "slm":
            try:
                logger.info(f"Auto-incrementing store index for unit {unit_id}")
                increment_response = await self.device_controller.increment_index(
                    unit_id,
                    action.device_type,
                )
                logger.info(f"Index incremented: {increment_response}")
            except Exception as e:
                logger.warning(f"Failed to increment index for {unit_id}: {e}")
                # Continue with start anyway - don't fail the whole action

        # Start recording via device controller
        response = await self.device_controller.start_recording(
            unit_id,
            action.device_type,
            config={},  # TODO: Load config from action.notes or metadata
            config={},
        )

        # Create recording session
@@ -210,7 +255,11 @@ class SchedulerService:
            session_type="sound" if action.device_type == "slm" else "vibration",
            started_at=datetime.utcnow(),
            status="recording",
            session_metadata=json.dumps({"scheduled_action_id": action.id}),
            session_metadata=json.dumps({
                "scheduled_action_id": action.id,
                "auto_increment_index": auto_increment_index,
                "increment_response": increment_response,
            }),
        )
        db.add(session)

@@ -218,6 +267,8 @@ class SchedulerService:
            "status": "started",
            "session_id": session.id,
            "device_response": response,
            "index_incremented": auto_increment_index,
            "increment_response": increment_response,
        }

    async def _execute_stop(
@@ -295,6 +346,90 @@ class SchedulerService:
            "device_response": response,
        }

    # ========================================================================
    # Recurring Schedule Generation
    # ========================================================================

    async def generate_recurring_actions(self) -> int:
        """
        Generate ScheduledActions from all enabled recurring schedules.

        Runs once per hour to generate actions for the next 7 days.

        Returns:
            Total number of actions generated
        """
        db = SessionLocal()
        total_generated = 0

        try:
            from backend.services.recurring_schedule_service import get_recurring_schedule_service

            service = get_recurring_schedule_service(db)
            schedules = service.get_enabled_schedules()

            if not schedules:
                logger.debug("No enabled recurring schedules found")
                return 0

            logger.info(f"Generating actions for {len(schedules)} recurring schedule(s)")

            for schedule in schedules:
                try:
                    actions = service.generate_actions_for_schedule(schedule, horizon_days=7)
                    total_generated += len(actions)
                except Exception as e:
                    logger.error(f"Error generating actions for schedule {schedule.id}: {e}")

            if total_generated > 0:
                logger.info(f"Generated {total_generated} scheduled actions from recurring schedules")

        except Exception as e:
            logger.error(f"Error in generate_recurring_actions: {e}", exc_info=True)
        finally:
            db.close()

        return total_generated

    async def cleanup_old_actions(self, retention_days: int = 30) -> int:
        """
        Remove old completed/failed actions to prevent database bloat.

        Args:
            retention_days: Keep actions newer than this many days

        Returns:
            Number of actions cleaned up
        """
        db = SessionLocal()
        cleaned = 0

        try:
            cutoff = datetime.utcnow() - timedelta(days=retention_days)

            old_actions = db.query(ScheduledAction).filter(
                and_(
                    ScheduledAction.execution_status.in_(["completed", "failed", "cancelled"]),
                    ScheduledAction.executed_at < cutoff,
                )
            ).all()

            cleaned = len(old_actions)
            for action in old_actions:
                db.delete(action)

            if cleaned > 0:
                db.commit()
                logger.info(f"Cleaned up {cleaned} old scheduled actions (>{retention_days} days)")

        except Exception as e:
            logger.error(f"Error cleaning up old actions: {e}")
            db.rollback()
        finally:
            db.close()

        return cleaned

    # ========================================================================
    # Manual Execution (for testing/debugging)
    # ========================================================================
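
For illustration, the notes column is the hand-off between the generator and the executor: the recurring-schedule service serializes a small JSON object into ScheduledAction.notes, and _execute_start parses it back to decide whether to bump the store index first. A minimal sketch of that round trip, with placeholder values:

import json

# What the generator writes into the start action's notes field
notes = json.dumps({
    "schedule_name": "Night-time noise monitoring",   # placeholder
    "schedule_id": "example-schedule-id",             # placeholder
    "auto_increment_index": True,
})

# What _execute_start does with it before calling the device controller
auto_increment_index = False
try:
    auto_increment_index = json.loads(notes).get("auto_increment_index", False)
except json.JSONDecodeError:
    pass  # plain-text notes are left untouched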
@@ -276,6 +276,124 @@ class SLMMClient:
        """
        return await self._request("POST", f"/{unit_id}/reset")

    # ========================================================================
    # Store/Index Management
    # ========================================================================

    async def get_index_number(self, unit_id: str) -> Dict[str, Any]:
        """
        Get current store/index number from device.

        Args:
            unit_id: Unit identifier

        Returns:
            Dict with current index_number (store name)
        """
        return await self._request("GET", f"/{unit_id}/index-number")

    async def set_index_number(
        self,
        unit_id: str,
        index_number: int,
    ) -> Dict[str, Any]:
        """
        Set store/index number on device.

        Args:
            unit_id: Unit identifier
            index_number: New index number to set

        Returns:
            Confirmation response
        """
        return await self._request(
            "PUT",
            f"/{unit_id}/index-number",
            data={"index_number": index_number},
        )

    async def check_overwrite_status(self, unit_id: str) -> Dict[str, Any]:
        """
        Check if data exists at the current store index.

        Args:
            unit_id: Unit identifier

        Returns:
            Dict with:
            - overwrite_status: "None" (safe) or "Exist" (would overwrite)
            - will_overwrite: bool
            - safe_to_store: bool
        """
        return await self._request("GET", f"/{unit_id}/overwrite-check")

    async def increment_index(self, unit_id: str, max_attempts: int = 100) -> Dict[str, Any]:
        """
        Find and set the next available (unused) store/index number.

        Checks the current index - if it would overwrite existing data,
        increments until finding an unused index number.

        Args:
            unit_id: Unit identifier
            max_attempts: Maximum number of indices to try before giving up

        Returns:
            Dict with old_index, new_index, and attempts_made
        """
        # Get current index
        current = await self.get_index_number(unit_id)
        old_index = current.get("index_number", 0)

        # Check if current index is safe
        overwrite_check = await self.check_overwrite_status(unit_id)
        if overwrite_check.get("safe_to_store", False):
            # Current index is safe, no need to increment
            return {
                "success": True,
                "old_index": old_index,
                "new_index": old_index,
                "unit_id": unit_id,
                "already_safe": True,
                "attempts_made": 0,
            }

        # Need to find an unused index
        attempts = 0
        test_index = old_index + 1

        while attempts < max_attempts:
            # Set the new index
            await self.set_index_number(unit_id, test_index)

            # Check if this index is safe
            overwrite_check = await self.check_overwrite_status(unit_id)
            attempts += 1

            if overwrite_check.get("safe_to_store", False):
                return {
                    "success": True,
                    "old_index": old_index,
                    "new_index": test_index,
                    "unit_id": unit_id,
                    "already_safe": False,
                    "attempts_made": attempts,
                }

            # Try next index (wrap around at 9999)
            test_index = (test_index + 1) % 10000

            # Avoid infinite loops if we've wrapped around
            if test_index == old_index:
                break

        # Could not find a safe index
        raise SLMMDeviceError(
            f"Could not find unused store index for {unit_id} after {attempts} attempts. "
            f"Consider downloading and clearing data from the device."
        )
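
For illustration, a minimal sketch of how a caller drives the index search above, assuming a reachable SLMM instance; the unit id is a placeholder and the result shown in the comment is illustrative.

import asyncio
from backend.services.slmm_client import get_slmm_client

async def bump_store_index(unit_id: str = "NL43-0001"):   # placeholder unit id
    client = get_slmm_client()
    # Finds the next store index that would not overwrite existing data.
    result = await client.increment_index(unit_id, max_attempts=100)
    # e.g. {"success": True, "old_index": 12, "new_index": 13, "unit_id": "NL43-0001",
    #       "already_safe": False, "attempts_made": 1}
    return result

# asyncio.run(bump_store_index())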
    # ========================================================================
    # Device Settings
    # ========================================================================
@@ -387,6 +505,73 @@ class SLMMClient:
        }
        return await self._request("POST", f"/{unit_id}/ftp/download", data=data)

    # ========================================================================
    # Polling Status (for device monitoring/alerts)
    # ========================================================================

    async def get_polling_status(self) -> Dict[str, Any]:
        """
        Get global polling status from SLMM.

        Returns device reachability information for all polled devices.
        Used by DeviceStatusMonitor to detect offline/online transitions.

        Returns:
            Dict with devices list containing:
            - unit_id
            - is_reachable
            - consecutive_failures
            - last_poll_attempt
            - last_success
            - last_error
        """
        try:
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.get(f"{self.base_url}/api/nl43/_polling/status")
                response.raise_for_status()
                return response.json()
        except httpx.ConnectError:
            raise SLMMConnectionError("Cannot connect to SLMM for polling status")
        except Exception as e:
            raise SLMMClientError(f"Failed to get polling status: {str(e)}")

    async def get_device_polling_config(self, unit_id: str) -> Dict[str, Any]:
        """
        Get polling configuration for a specific device.

        Args:
            unit_id: Unit identifier

        Returns:
            Dict with poll_enabled and poll_interval_seconds
        """
        return await self._request("GET", f"/{unit_id}/polling/config")

    async def update_device_polling_config(
        self,
        unit_id: str,
        poll_enabled: Optional[bool] = None,
        poll_interval_seconds: Optional[int] = None,
    ) -> Dict[str, Any]:
        """
        Update polling configuration for a device.

        Args:
            unit_id: Unit identifier
            poll_enabled: Enable/disable polling
            poll_interval_seconds: Polling interval (10-3600)

        Returns:
            Updated config
        """
        config = {}
        if poll_enabled is not None:
            config["poll_enabled"] = poll_enabled
        if poll_interval_seconds is not None:
            config["poll_interval_seconds"] = poll_interval_seconds

        return await self._request("PUT", f"/{unit_id}/polling/config", data=config)

    # ========================================================================
    # Health Check
    # ========================================================================

87
templates/partials/alerts/alert_dropdown.html
Normal file
@@ -0,0 +1,87 @@
<!-- Alert Dropdown Content -->
<!-- Loaded via HTMX into the alert dropdown in the navbar -->

<div class="max-h-96 overflow-y-auto">
  {% if alerts %}
    {% for item in alerts %}
    <div class="p-3 border-b border-gray-200 dark:border-gray-700 hover:bg-gray-50 dark:hover:bg-gray-700/50 transition-colors
                {% if item.alert.severity == 'critical' %}bg-red-50 dark:bg-red-900/20{% endif %}">
      <div class="flex items-start gap-3">
        <!-- Severity icon -->
        {% if item.alert.severity == 'critical' %}
        <span class="text-red-500 flex-shrink-0 mt-0.5">
          <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
            <path fill-rule="evenodd" d="M8.257 3.099c.765-1.36 2.722-1.36 3.486 0l5.58 9.92c.75 1.334-.213 2.98-1.742 2.98H4.42c-1.53 0-2.493-1.646-1.743-2.98l5.58-9.92zM11 13a1 1 0 11-2 0 1 1 0 012 0zm-1-8a1 1 0 00-1 1v3a1 1 0 002 0V6a1 1 0 00-1-1z" clip-rule="evenodd"/>
          </svg>
        </span>
        {% elif item.alert.severity == 'warning' %}
        <span class="text-yellow-500 flex-shrink-0 mt-0.5">
          <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
            <path fill-rule="evenodd" d="M8.257 3.099c.765-1.36 2.722-1.36 3.486 0l5.58 9.92c.75 1.334-.213 2.98-1.742 2.98H4.42c-1.53 0-2.493-1.646-1.743-2.98l5.58-9.92zM11 13a1 1 0 11-2 0 1 1 0 012 0zm-1-8a1 1 0 00-1 1v3a1 1 0 002 0V6a1 1 0 00-1-1z" clip-rule="evenodd"/>
          </svg>
        </span>
        {% else %}
        <span class="text-blue-500 flex-shrink-0 mt-0.5">
          <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
            <path fill-rule="evenodd" d="M18 10a8 8 0 11-16 0 8 8 0 0116 0zm-7-4a1 1 0 11-2 0 1 1 0 012 0zM9 9a1 1 0 000 2v3a1 1 0 001 1h1a1 1 0 100-2v-3a1 1 0 00-1-1H9z" clip-rule="evenodd"/>
          </svg>
        </span>
        {% endif %}

        <div class="flex-1 min-w-0">
          <p class="text-sm font-medium text-gray-900 dark:text-white truncate">
            {{ item.alert.title }}
          </p>
          {% if item.alert.message %}
          <p class="text-xs text-gray-500 dark:text-gray-400 line-clamp-2 mt-0.5">
            {{ item.alert.message }}
          </p>
          {% endif %}
          <p class="text-xs text-gray-400 dark:text-gray-500 mt-1">
            {{ item.time_ago }}
          </p>
        </div>

        <!-- Actions -->
        <div class="flex items-center gap-1 flex-shrink-0">
          <button hx-post="/api/alerts/{{ item.alert.id }}/acknowledge"
                  hx-swap="none"
                  hx-on::after-request="htmx.trigger('#alert-dropdown-content', 'refresh')"
                  class="p-1.5 text-gray-400 hover:text-green-600 dark:hover:text-green-400 rounded transition-colors"
                  title="Acknowledge">
            <svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"/>
            </svg>
          </button>
          <button hx-post="/api/alerts/{{ item.alert.id }}/dismiss"
                  hx-swap="none"
                  hx-on::after-request="htmx.trigger('#alert-dropdown-content', 'refresh')"
                  class="p-1.5 text-gray-400 hover:text-red-600 dark:hover:text-red-400 rounded transition-colors"
                  title="Dismiss">
            <svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
            </svg>
          </button>
        </div>
      </div>
    </div>
    {% endfor %}
  {% else %}
    <div class="p-8 text-center">
      <svg class="w-12 h-12 mx-auto mb-3 text-gray-300 dark:text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
        <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"/>
      </svg>
      <p class="text-gray-500 dark:text-gray-400 text-sm">No active alerts</p>
      <p class="text-gray-400 dark:text-gray-500 text-xs mt-1">All systems operational</p>
    </div>
  {% endif %}
</div>

<!-- View all link -->
{% if total_count > 0 %}
<div class="p-3 border-t border-gray-200 dark:border-gray-700 text-center bg-gray-50 dark:bg-gray-800/50">
  <a href="/alerts" class="text-sm text-seismo-orange hover:text-seismo-navy dark:hover:text-orange-300 font-medium">
    View all {{ total_count }} alert{{ 's' if total_count != 1 else '' }}
  </a>
</div>
{% endif %}

125
templates/partials/alerts/alert_list.html
Normal file
@@ -0,0 +1,125 @@
<!-- Alert List Partial -->
|
||||||
|
<!-- Full list of alerts for the alerts page -->
|
||||||
|
|
||||||
|
<div class="space-y-3">
|
||||||
|
{% if alerts %}
|
||||||
|
{% for item in alerts %}
|
||||||
|
<div class="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4
|
||||||
|
{% if item.alert.severity == 'critical' and item.alert.status == 'active' %}border-l-4 border-l-red-500{% endif %}
|
||||||
|
{% if item.alert.severity == 'warning' and item.alert.status == 'active' %}border-l-4 border-l-yellow-500{% endif %}
|
||||||
|
{% if item.alert.status != 'active' %}opacity-60{% endif %}">
|
||||||
|
<div class="flex items-start gap-4">
|
||||||
|
<!-- Severity icon -->
|
||||||
|
<div class="flex-shrink-0">
|
||||||
|
{% if item.alert.severity == 'critical' %}
|
||||||
|
<div class="w-10 h-10 rounded-full bg-red-100 dark:bg-red-900/30 flex items-center justify-center">
|
||||||
|
<svg class="w-5 h-5 text-red-600 dark:text-red-400" fill="currentColor" viewBox="0 0 20 20">
|
||||||
|
<path fill-rule="evenodd" d="M8.257 3.099c.765-1.36 2.722-1.36 3.486 0l5.58 9.92c.75 1.334-.213 2.98-1.742 2.98H4.42c-1.53 0-2.493-1.646-1.743-2.98l5.58-9.92zM11 13a1 1 0 11-2 0 1 1 0 012 0zm-1-8a1 1 0 00-1 1v3a1 1 0 002 0V6a1 1 0 00-1-1z" clip-rule="evenodd"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
{% elif item.alert.severity == 'warning' %}
|
||||||
|
<div class="w-10 h-10 rounded-full bg-yellow-100 dark:bg-yellow-900/30 flex items-center justify-center">
|
||||||
|
<svg class="w-5 h-5 text-yellow-600 dark:text-yellow-400" fill="currentColor" viewBox="0 0 20 20">
|
||||||
|
<path fill-rule="evenodd" d="M8.257 3.099c.765-1.36 2.722-1.36 3.486 0l5.58 9.92c.75 1.334-.213 2.98-1.742 2.98H4.42c-1.53 0-2.493-1.646-1.743-2.98l5.58-9.92zM11 13a1 1 0 11-2 0 1 1 0 012 0zm-1-8a1 1 0 00-1 1v3a1 1 0 002 0V6a1 1 0 00-1-1z" clip-rule="evenodd"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="w-10 h-10 rounded-full bg-blue-100 dark:bg-blue-900/30 flex items-center justify-center">
|
||||||
|
<svg class="w-5 h-5 text-blue-600 dark:text-blue-400" fill="currentColor" viewBox="0 0 20 20">
|
||||||
|
<path fill-rule="evenodd" d="M18 10a8 8 0 11-16 0 8 8 0 0116 0zm-7-4a1 1 0 11-2 0 1 1 0 012 0zM9 9a1 1 0 000 2v3a1 1 0 001 1h1a1 1 0 100-2v-3a1 1 0 00-1-1H9z" clip-rule="evenodd"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Content -->
|
||||||
|
<div class="flex-1 min-w-0">
|
||||||
|
<div class="flex items-center gap-2 mb-1">
|
||||||
|
<h3 class="text-base font-semibold text-gray-900 dark:text-white">
|
||||||
|
{{ item.alert.title }}
|
||||||
|
</h3>
|
||||||
|
<!-- Status badge -->
|
||||||
|
{% if item.alert.status == 'active' %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-300">
|
||||||
|
Active
|
||||||
|
</span>
|
||||||
|
{% elif item.alert.status == 'acknowledged' %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-yellow-100 text-yellow-700 dark:bg-yellow-900/30 dark:text-yellow-300">
|
||||||
|
Acknowledged
|
||||||
|
</span>
|
||||||
|
{% elif item.alert.status == 'resolved' %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-300">
|
||||||
|
Resolved
|
||||||
|
</span>
|
||||||
|
{% elif item.alert.status == 'dismissed' %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-400">
|
||||||
|
Dismissed
|
||||||
|
</span>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if item.alert.message %}
|
||||||
|
<p class="text-sm text-gray-600 dark:text-gray-300 mb-2">
|
||||||
|
{{ item.alert.message }}
|
||||||
|
</p>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<div class="flex items-center gap-4 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
<span>{{ item.time_ago }}</span>
|
||||||
|
{% if item.alert.unit_id %}
|
||||||
|
<span class="flex items-center gap-1">
|
||||||
|
<svg class="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 3v2m6-2v2M9 19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z"/>
|
||||||
|
</svg>
|
||||||
|
{{ item.alert.unit_id }}
|
||||||
|
</span>
|
||||||
|
{% endif %}
|
||||||
|
<span class="capitalize">{{ item.alert.alert_type | replace('_', ' ') }}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Actions -->
|
||||||
|
{% if item.alert.status == 'active' %}
|
||||||
|
<div class="flex items-center gap-2 flex-shrink-0">
|
||||||
|
<button hx-post="/api/alerts/{{ item.alert.id }}/acknowledge"
|
||||||
|
hx-swap="none"
|
||||||
|
hx-on::after-request="htmx.trigger('#alert-list', 'refresh')"
|
||||||
|
class="px-3 py-1.5 text-sm bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-300 rounded-lg hover:bg-gray-200 dark:hover:bg-gray-600 transition-colors">
|
||||||
|
Acknowledge
|
||||||
|
</button>
|
||||||
|
<button hx-post="/api/alerts/{{ item.alert.id }}/resolve"
|
||||||
|
hx-swap="none"
|
||||||
|
hx-on::after-request="htmx.trigger('#alert-list', 'refresh')"
|
||||||
|
class="px-3 py-1.5 text-sm bg-green-100 dark:bg-green-900/30 text-green-700 dark:text-green-300 rounded-lg hover:bg-green-200 dark:hover:bg-green-900/50 transition-colors">
|
||||||
|
Resolve
|
||||||
|
</button>
|
||||||
|
<button hx-post="/api/alerts/{{ item.alert.id }}/dismiss"
|
||||||
|
hx-swap="none"
|
||||||
|
hx-on::after-request="htmx.trigger('#alert-list', 'refresh')"
|
||||||
|
class="px-3 py-1.5 text-sm text-gray-500 hover:text-red-600 dark:hover:text-red-400 transition-colors"
|
||||||
|
title="Dismiss">
|
||||||
|
<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
{% else %}
|
||||||
|
<div class="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-12 text-center">
|
||||||
|
<svg class="w-16 h-16 mx-auto mb-4 text-gray-300 dark:text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"/>
|
||||||
|
</svg>
|
||||||
|
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-2">No alerts</h3>
|
||||||
|
<p class="text-gray-500 dark:text-gray-400">
|
||||||
|
{% if status_filter %}
|
||||||
|
No {{ status_filter }} alerts found.
|
||||||
|
{% else %}
|
||||||
|
All systems are operating normally.
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
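Note: the Acknowledge/Resolve/Dismiss buttons above post to /api/alerts/{id}/acknowledge, /resolve and /dismiss via hx-post. A minimal sketch of calling the same acknowledge endpoint from custom JavaScript, assuming an alert id is at hand (the function name is illustrative):

// Illustrative only — same endpoint the Acknowledge button uses via hx-post.
async function acknowledgeAlert(alertId) {
    const response = await fetch(`/api/alerts/${alertId}/acknowledge`, { method: 'POST' });
    if (!response.ok) {
        throw new Error('Failed to acknowledge alert ' + alertId);
    }
}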
151
templates/partials/projects/recurring_schedule_list.html
Normal file
@@ -0,0 +1,151 @@
<!-- Recurring Schedule List -->
|
||||||
|
<!-- Displays all recurring schedules for a project -->
|
||||||
|
|
||||||
|
<div class="space-y-4">
|
||||||
|
{% if schedules %}
|
||||||
|
{% for item in schedules %}
|
||||||
|
<div class="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4
|
||||||
|
{% if not item.schedule.enabled %}opacity-60{% endif %}">
|
||||||
|
<div class="flex items-start justify-between gap-4">
|
||||||
|
<div class="flex-1 min-w-0">
|
||||||
|
<div class="flex items-center gap-3 mb-2">
|
||||||
|
<h4 class="text-base font-semibold text-gray-900 dark:text-white">
|
||||||
|
{{ item.schedule.name }}
|
||||||
|
</h4>
|
||||||
|
|
||||||
|
<!-- Type badge -->
|
||||||
|
{% if item.schedule.schedule_type == 'weekly_calendar' %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-300">
|
||||||
|
Weekly
|
||||||
|
</span>
|
||||||
|
{% else %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-purple-100 text-purple-700 dark:bg-purple-900/30 dark:text-purple-300">
|
||||||
|
24/7 Cycle
|
||||||
|
</span>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- Status badge -->
|
||||||
|
{% if item.schedule.enabled %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-300">
|
||||||
|
Active
|
||||||
|
</span>
|
||||||
|
{% else %}
|
||||||
|
<span class="px-2 py-0.5 text-xs font-medium rounded-full bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-400">
|
||||||
|
Disabled
|
||||||
|
</span>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Location info -->
|
||||||
|
{% if item.location %}
|
||||||
|
<div class="text-sm text-gray-600 dark:text-gray-400 mb-2">
|
||||||
|
<span class="text-gray-500">Location:</span>
|
||||||
|
<a href="/projects/{{ project_id }}/nrl/{{ item.location.id }}"
|
||||||
|
class="text-seismo-orange hover:text-seismo-navy font-medium ml-1">
|
||||||
|
{{ item.location.name }}
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- Schedule details -->
|
||||||
|
<div class="text-sm text-gray-500 dark:text-gray-400 space-y-1">
|
||||||
|
{% if item.schedule.schedule_type == 'weekly_calendar' and item.pattern %}
|
||||||
|
<div class="flex flex-wrap gap-2">
|
||||||
|
{% set days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] %}
|
||||||
|
{% set day_abbr = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] %}
|
||||||
|
{% for day in days %}
|
||||||
|
{% if item.pattern.get(day, {}).get('enabled') %}
|
||||||
|
<span class="px-2 py-0.5 text-xs bg-gray-100 dark:bg-gray-700 rounded">
|
||||||
|
{{ day_abbr[loop.index0] }}
|
||||||
|
{{ item.pattern[day].get('start', '') }}-{{ item.pattern[day].get('end', '') }}
|
||||||
|
</span>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% elif item.schedule.schedule_type == 'simple_interval' %}
|
||||||
|
<div>
|
||||||
|
Cycle at {{ item.schedule.cycle_time or '00:00' }} daily
|
||||||
|
{% if item.schedule.include_download %}
|
||||||
|
(with download)
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if item.schedule.next_occurrence %}
|
||||||
|
<div class="text-xs">
|
||||||
|
<span class="text-gray-400">Next:</span>
|
||||||
|
{{ item.schedule.next_occurrence.strftime('%Y-%m-%d %H:%M') }} {{ item.schedule.timezone }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Actions -->
|
||||||
|
<div class="flex items-center gap-2 flex-shrink-0">
|
||||||
|
{% if item.schedule.enabled %}
|
||||||
|
<button hx-post="/api/projects/{{ project_id }}/recurring-schedules/{{ item.schedule.id }}/disable"
|
||||||
|
hx-swap="none"
|
||||||
|
hx-on::after-request="htmx.trigger('#recurring-schedule-list', 'refresh')"
|
||||||
|
class="px-3 py-1.5 text-sm bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-300 rounded-lg hover:bg-gray-200 dark:hover:bg-gray-600 transition-colors">
|
||||||
|
Disable
|
||||||
|
</button>
|
||||||
|
{% else %}
|
||||||
|
<button hx-post="/api/projects/{{ project_id }}/recurring-schedules/{{ item.schedule.id }}/enable"
|
||||||
|
hx-swap="none"
|
||||||
|
hx-on::after-request="htmx.trigger('#recurring-schedule-list', 'refresh')"
|
||||||
|
class="px-3 py-1.5 text-sm bg-green-100 dark:bg-green-900/30 text-green-700 dark:text-green-300 rounded-lg hover:bg-green-200 dark:hover:bg-green-900/50 transition-colors">
|
||||||
|
Enable
|
||||||
|
</button>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<button onclick="editSchedule('{{ item.schedule.id }}')"
|
||||||
|
class="px-3 py-1.5 text-sm bg-seismo-orange text-white rounded-lg hover:bg-seismo-navy transition-colors">
|
||||||
|
Edit
|
||||||
|
</button>
|
||||||
|
|
||||||
|
<button hx-delete="/api/projects/{{ project_id }}/recurring-schedules/{{ item.schedule.id }}"
|
||||||
|
hx-confirm="Delete this recurring schedule?"
|
||||||
|
hx-swap="none"
|
||||||
|
hx-on::after-request="htmx.trigger('#recurring-schedule-list', 'refresh')"
|
||||||
|
class="p-1.5 text-gray-400 hover:text-red-600 dark:hover:text-red-400 transition-colors">
|
||||||
|
<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"/>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
{% else %}
|
||||||
|
<div class="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-12 text-center">
|
||||||
|
<svg class="w-16 h-16 mx-auto mb-4 text-gray-300 dark:text-gray-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M8 7V3m8 4V3m-9 8h10M5 21h14a2 2 0 002-2V7a2 2 0 00-2-2H5a2 2 0 00-2 2v12a2 2 0 002 2z"/>
|
||||||
|
</svg>
|
||||||
|
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-2">No recurring schedules</h3>
|
||||||
|
<p class="text-gray-500 dark:text-gray-400 mb-4">
|
||||||
|
Create a schedule to automate monitoring start/stop times.
|
||||||
|
</p>
|
||||||
|
<button onclick="showCreateScheduleModal()"
|
||||||
|
class="px-4 py-2 bg-seismo-orange text-white rounded-lg hover:bg-seismo-navy transition-colors">
|
||||||
|
Create Schedule
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
    function editSchedule(scheduleId) {
        // For now, redirect to a future edit page or show details
        // The edit modal will be implemented later
        alert('Edit schedule: ' + scheduleId + '\n\nNote: Full edit functionality coming soon. For now, you can delete and recreate the schedule.');
    }

    function showCreateScheduleModal() {
        // Call the parent page's openScheduleModal function
        if (typeof openScheduleModal === 'function') {
            openScheduleModal();
        } else {
            alert('Please use the "Create Schedule" button in the Schedules tab.');
        }
    }
</script>
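Note: the Enable/Disable/Delete buttons above fire a custom refresh event (htmx.trigger('#recurring-schedule-list', 'refresh')) after each request, and the #recurring-schedule-list container on the project page re-fetches this partial via hx-trigger="... refresh from:#recurring-schedule-list". A minimal sketch of reusing that hook from other scripts (function name is illustrative; assumes htmx is loaded globally):

// Illustrative only — re-runs the hx-get bound to the list container.
function refreshRecurringScheduleList() {
    const list = document.getElementById('recurring-schedule-list');
    if (list && window.htmx) {
        htmx.trigger(list, 'refresh');
    }
}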
231
templates/partials/projects/schedule_calendar.html
Normal file
@@ -0,0 +1,231 @@
<!-- Weekly Calendar Schedule Editor -->
|
||||||
|
<!-- Used in modals/forms for creating/editing weekly_calendar type schedules -->
|
||||||
|
|
||||||
|
<div id="schedule-calendar-editor" class="space-y-4">
|
||||||
|
<div class="mb-4">
|
||||||
|
<h4 class="text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">Weekly Schedule</h4>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
Select which days to monitor and set start/end times for each day.
|
||||||
|
For overnight monitoring (e.g., 7pm to 7am), the end time will be on the following day.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Day rows -->
|
||||||
|
<div class="space-y-3">
|
||||||
|
{% set days = [
|
||||||
|
('monday', 'Monday'),
|
||||||
|
('tuesday', 'Tuesday'),
|
||||||
|
('wednesday', 'Wednesday'),
|
||||||
|
('thursday', 'Thursday'),
|
||||||
|
('friday', 'Friday'),
|
||||||
|
('saturday', 'Saturday'),
|
||||||
|
('sunday', 'Sunday')
|
||||||
|
] %}
|
||||||
|
|
||||||
|
{% for day_key, day_name in days %}
|
||||||
|
<div class="flex items-center gap-4 p-3 bg-gray-50 dark:bg-gray-700/50 rounded-lg">
|
||||||
|
<!-- Day toggle -->
|
||||||
|
<label class="flex items-center gap-2 w-28 cursor-pointer">
|
||||||
|
<input type="checkbox"
|
||||||
|
id="day-{{ day_key }}"
|
||||||
|
name="weekly_pattern[{{ day_key }}][enabled]"
|
||||||
|
class="rounded text-seismo-orange focus:ring-seismo-orange"
|
||||||
|
onchange="toggleDayTimes('{{ day_key }}', this.checked)"
|
||||||
|
{% if pattern and pattern.get(day_key, {}).get('enabled') %}checked{% endif %}>
|
||||||
|
<span class="text-sm font-medium text-gray-700 dark:text-gray-300">{{ day_name }}</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<!-- Time inputs -->
|
||||||
|
<div class="flex items-center gap-2 day-times flex-1" id="times-{{ day_key }}"
|
||||||
|
{% if not pattern or not pattern.get(day_key, {}).get('enabled') %}style="opacity: 0.4; pointer-events: none;"{% endif %}>
|
||||||
|
<label class="text-xs text-gray-500 dark:text-gray-400">Start:</label>
|
||||||
|
<input type="time"
|
||||||
|
name="weekly_pattern[{{ day_key }}][start]"
|
||||||
|
value="{{ pattern.get(day_key, {}).get('start', '19:00') if pattern else '19:00' }}"
|
||||||
|
class="px-2 py-1 text-sm border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:ring-seismo-orange focus:border-seismo-orange">
|
||||||
|
|
||||||
|
<span class="text-gray-400 mx-1">to</span>
|
||||||
|
|
||||||
|
<label class="text-xs text-gray-500 dark:text-gray-400">End:</label>
|
||||||
|
<input type="time"
|
||||||
|
name="weekly_pattern[{{ day_key }}][end]"
|
||||||
|
value="{{ pattern.get(day_key, {}).get('end', '07:00') if pattern else '07:00' }}"
|
||||||
|
class="px-2 py-1 text-sm border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:ring-seismo-orange focus:border-seismo-orange">
|
||||||
|
|
||||||
|
<span class="text-xs text-gray-400 dark:text-gray-500 ml-2" id="overnight-hint-{{ day_key }}"
|
||||||
|
style="display: none;">
|
||||||
|
(next day)
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Quick select buttons -->
|
||||||
|
<div class="flex flex-wrap gap-2 pt-2 border-t border-gray-200 dark:border-gray-700">
|
||||||
|
<span class="text-xs text-gray-500 dark:text-gray-400 mr-2">Quick select:</span>
|
||||||
|
<button type="button" onclick="selectWeekdays()"
|
||||||
|
class="px-2 py-1 text-xs bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-300 rounded hover:bg-gray-200 dark:hover:bg-gray-600">
|
||||||
|
Weekdays
|
||||||
|
</button>
|
||||||
|
<button type="button" onclick="selectWeekends()"
|
||||||
|
class="px-2 py-1 text-xs bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-300 rounded hover:bg-gray-200 dark:hover:bg-gray-600">
|
||||||
|
Weekends
|
||||||
|
</button>
|
||||||
|
<button type="button" onclick="selectAllDays()"
|
||||||
|
class="px-2 py-1 text-xs bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-300 rounded hover:bg-gray-200 dark:hover:bg-gray-600">
|
||||||
|
All Days
|
||||||
|
</button>
|
||||||
|
<button type="button" onclick="clearAllDays()"
|
||||||
|
class="px-2 py-1 text-xs bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-300 rounded hover:bg-gray-200 dark:hover:bg-gray-600">
|
||||||
|
Clear All
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Automation Options -->
|
||||||
|
<div class="pt-4 border-t border-gray-200 dark:border-gray-700">
|
||||||
|
<h5 class="text-sm font-medium text-gray-700 dark:text-gray-300 mb-3">Automation Options</h5>
|
||||||
|
|
||||||
|
<div class="space-y-3">
|
||||||
|
<!-- Download data option -->
|
||||||
|
<div class="bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4">
|
||||||
|
<label class="flex items-start gap-3 cursor-pointer">
|
||||||
|
<input type="checkbox"
|
||||||
|
name="include_download"
|
||||||
|
id="include_download_calendar"
|
||||||
|
class="rounded text-seismo-orange focus:ring-seismo-orange mt-0.5"
|
||||||
|
{% if include_download is not defined or include_download %}checked{% endif %}>
|
||||||
|
<div>
|
||||||
|
<span class="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
Download data after each monitoring period
|
||||||
|
</span>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||||
|
When enabled, measurement data will be downloaded via FTP after each stop.
|
||||||
|
Disable if you prefer to download manually or if FTP is not configured.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Auto-increment index option -->
|
||||||
|
<div class="bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4">
|
||||||
|
<label class="flex items-start gap-3 cursor-pointer">
|
||||||
|
<input type="checkbox"
|
||||||
|
name="auto_increment_index"
|
||||||
|
id="auto_increment_index_calendar"
|
||||||
|
class="rounded text-seismo-orange focus:ring-seismo-orange mt-0.5"
|
||||||
|
{% if auto_increment_index is not defined or auto_increment_index %}checked{% endif %}>
|
||||||
|
<div>
|
||||||
|
<span class="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
Auto-increment store index before each start
|
||||||
|
</span>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||||
|
When enabled, the system will find an unused store/index number before starting.
|
||||||
|
This prevents "overwrite existing data?" prompts on the device.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
    const days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'];
    const weekdays = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday'];
    const weekends = ['saturday', 'sunday'];

    function toggleDayTimes(day, enabled) {
        const timesDiv = document.getElementById('times-' + day);
        if (enabled) {
            timesDiv.style.opacity = '1';
            timesDiv.style.pointerEvents = 'auto';
        } else {
            timesDiv.style.opacity = '0.4';
            timesDiv.style.pointerEvents = 'none';
        }
        updateOvernightHints();
    }

    function setDayEnabled(day, enabled) {
        const checkbox = document.getElementById('day-' + day);
        checkbox.checked = enabled;
        toggleDayTimes(day, enabled);
    }

    function selectWeekdays() {
        days.forEach(day => setDayEnabled(day, weekdays.includes(day)));
    }

    function selectWeekends() {
        days.forEach(day => setDayEnabled(day, weekends.includes(day)));
    }

    function selectAllDays() {
        days.forEach(day => setDayEnabled(day, true));
    }

    function clearAllDays() {
        days.forEach(day => setDayEnabled(day, false));
    }

    function updateOvernightHints() {
        days.forEach(day => {
            const startInput = document.querySelector(`input[name="weekly_pattern[${day}][start]"]`);
            const endInput = document.querySelector(`input[name="weekly_pattern[${day}][end]"]`);
            const hint = document.getElementById('overnight-hint-' + day);

            if (startInput && endInput && hint) {
                const start = startInput.value;
                const end = endInput.value;
                hint.style.display = (end && start && end <= start) ? 'inline' : 'none';
            }
        });
    }

    // Update hints on time change
    document.querySelectorAll('input[type="time"]').forEach(input => {
        input.addEventListener('change', updateOvernightHints);
    });

    // Initial update
    updateOvernightHints();

    // Function to collect form data as JSON
    function getWeeklyPatternData() {
        const pattern = {};
        days.forEach(day => {
            const checkbox = document.getElementById('day-' + day);
            const startInput = document.querySelector(`input[name="weekly_pattern[${day}][start]"]`);
            const endInput = document.querySelector(`input[name="weekly_pattern[${day}][end]"]`);

            pattern[day] = {
                enabled: checkbox.checked,
                start: startInput.value,
                end: endInput.value
            };
        });
        return pattern;
    }

    // Function to get auto-increment setting for calendar mode
    function getCalendarAutoIncrement() {
        const checkbox = document.getElementById('auto_increment_index_calendar');
        return checkbox ? checkbox.checked : true;
    }

    // Function to get include_download setting for calendar mode
    function getCalendarIncludeDownload() {
        const checkbox = document.getElementById('include_download_calendar');
        return checkbox ? checkbox.checked : true;
    }

    // Function to get all calendar options as object
    function getCalendarOptions() {
        return {
            weekly_pattern: getWeeklyPatternData(),
            auto_increment_index: getCalendarAutoIncrement(),
            include_download: getCalendarIncludeDownload()
        };
    }
</script>
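For reference, a sketch of the object getCalendarOptions() returns for a typical weeknight setup (days and times shown are just the template defaults; real values come from the form state):

// Illustrative only — shape produced by getCalendarOptions()/getWeeklyPatternData() above.
const exampleCalendarOptions = {
    weekly_pattern: {
        monday: { enabled: true, start: "19:00", end: "07:00" },
        friday: { enabled: true, start: "19:00", end: "07:00" },
        sunday: { enabled: false, start: "19:00", end: "07:00" }
        // ...the real object contains an entry for all seven days
    },
    auto_increment_index: true,
    include_download: true
};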
158
templates/partials/projects/schedule_interval.html
Normal file
@@ -0,0 +1,158 @@
<!-- Simple Interval Schedule Editor -->
|
||||||
|
<!-- Used for 24/7 continuous monitoring with daily stop/download/restart cycles -->
|
||||||
|
|
||||||
|
<div id="schedule-interval-editor" class="space-y-4">
|
||||||
|
<div class="mb-4">
|
||||||
|
<h4 class="text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">Continuous Monitoring (24/7)</h4>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
For uninterrupted monitoring: the device will automatically stop, download data,
and restart at the configured cycle time each day.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Info box -->
|
||||||
|
<div class="bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-4">
|
||||||
|
<div class="flex gap-3">
|
||||||
|
<svg class="w-5 h-5 text-blue-500 flex-shrink-0 mt-0.5" fill="currentColor" viewBox="0 0 20 20">
|
||||||
|
<path fill-rule="evenodd" d="M18 10a8 8 0 11-16 0 8 8 0 0116 0zm-7-4a1 1 0 11-2 0 1 1 0 012 0zM9 9a1 1 0 000 2v3a1 1 0 001 1h1a1 1 0 100-2v-3a1 1 0 00-1-1H9z" clip-rule="evenodd"/>
|
||||||
|
</svg>
|
||||||
|
<div class="text-sm text-blue-700 dark:text-blue-300">
|
||||||
|
<p class="font-medium mb-1">How it works:</p>
|
||||||
|
<ol class="list-decimal list-inside space-y-1 text-xs">
|
||||||
|
<li>At the cycle time, the measurement will <strong>stop</strong></li>
|
||||||
|
<li>If enabled, data will be <strong>downloaded</strong> via FTP</li>
|
||||||
|
<li>The measurement will <strong>restart</strong> automatically</li>
|
||||||
|
</ol>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Cycle time -->
|
||||||
|
<div class="bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4">
|
||||||
|
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
|
||||||
|
Daily Cycle Time
|
||||||
|
</label>
|
||||||
|
<div class="flex items-center gap-4">
|
||||||
|
<input type="time"
|
||||||
|
name="cycle_time"
|
||||||
|
id="cycle_time"
|
||||||
|
value="{{ cycle_time or '00:00' }}"
|
||||||
|
class="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:ring-seismo-orange focus:border-seismo-orange">
|
||||||
|
<span class="text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
Time when stop/download/restart cycle runs
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<p class="text-xs text-gray-400 dark:text-gray-500 mt-2">
|
||||||
|
Recommended: midnight (00:00) to minimize disruption to data collection
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Download option -->
|
||||||
|
<div class="bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4">
|
||||||
|
<label class="flex items-start gap-3 cursor-pointer">
|
||||||
|
<input type="checkbox"
|
||||||
|
name="include_download"
|
||||||
|
id="include_download"
|
||||||
|
class="rounded text-seismo-orange focus:ring-seismo-orange mt-0.5"
|
||||||
|
{% if include_download is not defined or include_download %}checked{% endif %}>
|
||||||
|
<div>
|
||||||
|
<span class="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
Download data before restart
|
||||||
|
</span>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||||
|
When enabled, measurement data will be downloaded via FTP during the cycle.
|
||||||
|
Disable if you prefer to download manually or if FTP is not configured.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Auto-increment index option -->
|
||||||
|
<div class="bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4">
|
||||||
|
<label class="flex items-start gap-3 cursor-pointer">
|
||||||
|
<input type="checkbox"
|
||||||
|
name="auto_increment_index"
|
||||||
|
id="auto_increment_index_interval"
|
||||||
|
class="rounded text-seismo-orange focus:ring-seismo-orange mt-0.5"
|
||||||
|
{% if auto_increment_index is not defined or auto_increment_index %}checked{% endif %}>
|
||||||
|
<div>
|
||||||
|
<span class="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
Auto-increment store index before restart
|
||||||
|
</span>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||||
|
When enabled, the store/index number is incremented before starting a new measurement.
|
||||||
|
This prevents "overwrite existing data?" prompts on the device.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Interval type (hidden for now, default to daily) -->
|
||||||
|
<input type="hidden" name="interval_type" value="daily">
|
||||||
|
|
||||||
|
<!-- Cycle preview -->
|
||||||
|
<div class="bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4">
|
||||||
|
<h5 class="text-sm font-medium text-gray-700 dark:text-gray-300 mb-3">Cycle Sequence Preview</h5>
|
||||||
|
<div class="flex items-center gap-2 text-sm">
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
<span class="w-6 h-6 rounded-full bg-red-100 dark:bg-red-900/30 flex items-center justify-center text-xs text-red-700 dark:text-red-300">1</span>
|
||||||
|
<span class="text-gray-600 dark:text-gray-400">Stop</span>
|
||||||
|
</div>
|
||||||
|
<svg class="w-4 h-4 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 5l7 7-7 7"/>
|
||||||
|
</svg>
|
||||||
|
<div class="flex items-center gap-2" id="download-step">
|
||||||
|
<span class="w-6 h-6 rounded-full bg-blue-100 dark:bg-blue-900/30 flex items-center justify-center text-xs text-blue-700 dark:text-blue-300">2</span>
|
||||||
|
<span class="text-gray-600 dark:text-gray-400">Download</span>
|
||||||
|
</div>
|
||||||
|
<svg class="w-4 h-4 text-gray-400" id="download-arrow" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 5l7 7-7 7"/>
|
||||||
|
</svg>
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
<span class="w-6 h-6 rounded-full bg-green-100 dark:bg-green-900/30 flex items-center justify-center text-xs text-green-700 dark:text-green-300" id="start-step-num">3</span>
|
||||||
|
<span class="text-gray-600 dark:text-gray-400">Start</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<p class="text-xs text-gray-400 dark:text-gray-500 mt-3" id="cycle-timing">
|
||||||
|
At <span id="preview-time">00:00</span>: Stop → Download (1 min) → Start (2 min)
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
    // Update preview when download checkbox changes
    document.getElementById('include_download').addEventListener('change', function() {
        const downloadStep = document.getElementById('download-step');
        const downloadArrow = document.getElementById('download-arrow');
        const startStepNum = document.getElementById('start-step-num');
        const cycleTiming = document.getElementById('cycle-timing');
        const timeValue = document.getElementById('cycle_time').value || '00:00';

        if (this.checked) {
            downloadStep.style.display = 'flex';
            downloadArrow.style.display = 'block';
            startStepNum.textContent = '3';
            cycleTiming.innerHTML = `At <span id="preview-time">${timeValue}</span>: Stop → Download (1 min) → Start (2 min)`;
        } else {
            downloadStep.style.display = 'none';
            downloadArrow.style.display = 'none';
            startStepNum.textContent = '2';
            cycleTiming.innerHTML = `At <span id="preview-time">${timeValue}</span>: Stop → Start (1 min)`;
        }
    });

    // Update preview time when cycle time changes
    document.getElementById('cycle_time').addEventListener('change', function() {
        document.getElementById('preview-time').textContent = this.value || '00:00';
    });

    // Function to get interval data as object
    function getIntervalData() {
        return {
            interval_type: 'daily',
            cycle_time: document.getElementById('cycle_time').value,
            include_download: document.getElementById('include_download').checked,
            auto_increment_index: document.getElementById('auto_increment_index_interval').checked
        };
    }
</script>
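Likewise, a sketch of what getIntervalData() returns with the defaults shown above:

// Illustrative only — mirrors the form defaults above.
const exampleIntervalData = {
    interval_type: 'daily',
    cycle_time: '00:00',
    include_download: true,
    auto_increment_index: true
};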
@@ -132,23 +132,55 @@
|
||||||
<!-- Schedules Tab -->
|
<!-- Schedules Tab -->
|
||||||
<div id="schedules-tab" class="tab-panel hidden">
|
<div id="schedules-tab" class="tab-panel hidden">
|
||||||
<div class="bg-white dark:bg-slate-800 rounded-xl shadow-lg p-6">
|
<!-- Recurring Schedules Section -->
|
||||||
|
<div class="bg-white dark:bg-slate-800 rounded-xl shadow-lg p-6 mb-6">
|
||||||
<div class="flex items-center justify-between mb-6">
|
<div class="flex items-center justify-between mb-6">
|
||||||
<h2 class="text-xl font-semibold text-gray-900 dark:text-white">Scheduled Actions</h2>
|
<div>
|
||||||
|
<h2 class="text-xl font-semibold text-gray-900 dark:text-white">Recurring Schedules</h2>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400 mt-1">
|
||||||
|
Automated patterns that generate scheduled actions
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
<button onclick="openScheduleModal()"
|
<button onclick="openScheduleModal()"
|
||||||
class="px-4 py-2 bg-seismo-orange text-white rounded-lg hover:bg-seismo-navy transition-colors">
|
class="px-4 py-2 bg-seismo-orange text-white rounded-lg hover:bg-seismo-navy transition-colors">
|
||||||
<svg class="w-5 h-5 inline mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
<svg class="w-5 h-5 inline mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 4v16m8-8H4"></path>
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 4v16m8-8H4"></path>
|
||||||
</svg>
|
</svg>
|
||||||
Schedule Action
|
Create Schedule
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="project-schedules"
|
<div id="recurring-schedule-list"
|
||||||
hx-get="/api/projects/{{ project_id }}/schedules"
|
hx-get="/api/projects/{{ project_id }}/recurring-schedules/partials/list"
|
||||||
hx-trigger="load, every 30s"
|
hx-trigger="load, refresh from:#recurring-schedule-list"
|
||||||
hx-swap="innerHTML">
|
hx-swap="innerHTML">
|
||||||
<div class="text-center py-8 text-gray-500">Loading schedules...</div>
|
<div class="text-center py-8 text-gray-500">Loading recurring schedules...</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Scheduled Actions Section -->
|
||||||
|
<div class="bg-white dark:bg-slate-800 rounded-xl shadow-lg p-6">
|
||||||
|
<div class="flex items-center justify-between mb-6">
|
||||||
|
<div>
|
||||||
|
<h2 class="text-xl font-semibold text-gray-900 dark:text-white">Upcoming Actions</h2>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400 mt-1">
|
||||||
|
Individual scheduled start/stop/download actions
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<select id="schedules-filter" onchange="filterScheduledActions()"
|
||||||
|
class="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-700 text-gray-900 dark:text-white text-sm">
|
||||||
|
<option value="pending">Pending</option>
|
||||||
|
<option value="all">All</option>
|
||||||
|
<option value="completed">Completed</option>
|
||||||
|
<option value="failed">Failed</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="project-schedules"
|
||||||
|
hx-get="/api/projects/{{ project_id }}/schedules?status=pending"
|
||||||
|
hx-trigger="load, every 30s, refresh from:#project-schedules"
|
||||||
|
hx-swap="innerHTML">
|
||||||
|
<div class="text-center py-8 text-gray-500">Loading scheduled actions...</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -378,6 +410,122 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Schedule Modal -->
|
||||||
|
<div id="schedule-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 z-50 flex items-center justify-center">
|
||||||
|
<div class="bg-white dark:bg-slate-800 rounded-xl shadow-2xl w-full max-w-3xl max-h-[90vh] overflow-y-auto m-4">
|
||||||
|
<div class="p-6 border-b border-gray-200 dark:border-gray-700 flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<h2 class="text-2xl font-bold text-gray-900 dark:text-white">Create Recurring Schedule</h2>
|
||||||
|
<p class="text-gray-600 dark:text-gray-400 mt-1">Set up automated monitoring schedules</p>
|
||||||
|
</div>
|
||||||
|
<button onclick="closeScheduleModal()" class="text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200">
|
||||||
|
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"></path>
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<form id="schedule-form" class="p-6 space-y-6">
|
||||||
|
<!-- Schedule Name -->
|
||||||
|
<div>
|
||||||
|
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">Schedule Name</label>
|
||||||
|
<input type="text" name="schedule_name" id="schedule-name"
|
||||||
|
placeholder="e.g., Weeknight Monitoring"
|
||||||
|
class="w-full px-4 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-700 text-gray-900 dark:text-white" required>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Location Selection (Multiple) -->
|
||||||
|
<div>
|
||||||
|
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
|
||||||
|
Locations
|
||||||
|
<span class="text-xs font-normal text-gray-500 ml-2">(select one or more)</span>
|
||||||
|
</label>
|
||||||
|
<div id="schedule-locations-container"
|
||||||
|
class="max-h-48 overflow-y-auto border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-700 p-2 space-y-1">
|
||||||
|
<div class="text-gray-500 text-sm py-2 text-center">Loading locations...</div>
|
||||||
|
</div>
|
||||||
|
<p id="schedule-location-empty" class="hidden text-xs text-gray-500 mt-2">
|
||||||
|
No locations available. Create a location first.
|
||||||
|
</p>
|
||||||
|
<p id="schedule-location-error" class="hidden text-xs text-red-500 mt-2">
|
||||||
|
Please select at least one location.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Schedule Type Selection -->
|
||||||
|
<div>
|
||||||
|
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-3">Schedule Type</label>
|
||||||
|
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||||
|
<label class="relative cursor-pointer">
|
||||||
|
<input type="radio" name="schedule_type" value="weekly_calendar" class="peer sr-only" checked onchange="toggleScheduleType('weekly_calendar')">
|
||||||
|
<div class="p-4 border-2 border-gray-200 dark:border-gray-600 rounded-lg peer-checked:border-seismo-orange peer-checked:bg-orange-50 dark:peer-checked:bg-orange-900/20 transition-colors">
|
||||||
|
<div class="flex items-center gap-3 mb-2">
|
||||||
|
<svg class="w-6 h-6 text-gray-500 peer-checked:text-seismo-orange" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M8 7V3m8 4V3m-9 8h10M5 21h14a2 2 0 002-2V7a2 2 0 00-2-2H5a2 2 0 00-2 2v12a2 2 0 002 2z"/>
|
||||||
|
</svg>
|
||||||
|
<span class="font-medium text-gray-900 dark:text-white">Weekly Calendar</span>
|
||||||
|
</div>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
Select specific days with start/end times. Ideal for weeknight monitoring (Mon-Fri 7pm-7am).
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
<label class="relative cursor-pointer">
|
||||||
|
<input type="radio" name="schedule_type" value="simple_interval" class="peer sr-only" onchange="toggleScheduleType('simple_interval')">
|
||||||
|
<div class="p-4 border-2 border-gray-200 dark:border-gray-600 rounded-lg peer-checked:border-seismo-orange peer-checked:bg-orange-50 dark:peer-checked:bg-orange-900/20 transition-colors">
|
||||||
|
<div class="flex items-center gap-3 mb-2">
|
||||||
|
<svg class="w-6 h-6 text-gray-500 peer-checked:text-seismo-orange" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"/>
|
||||||
|
</svg>
|
||||||
|
<span class="font-medium text-gray-900 dark:text-white">24/7 Continuous</span>
|
||||||
|
</div>
|
||||||
|
<p class="text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
Continuous monitoring with daily stop/download/restart cycle at a set time.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Weekly Calendar Editor -->
|
||||||
|
<div id="schedule-weekly-wrapper">
|
||||||
|
{% include "partials/projects/schedule_calendar.html" %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Simple Interval Editor -->
|
||||||
|
<div id="schedule-interval-wrapper" class="hidden">
|
||||||
|
{% include "partials/projects/schedule_interval.html" %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Timezone -->
|
||||||
|
<div>
|
||||||
|
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">Timezone</label>
|
||||||
|
<select name="timezone" id="schedule-timezone"
|
||||||
|
class="w-full px-4 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-700 text-gray-900 dark:text-white">
|
||||||
|
<option value="America/New_York">Eastern (America/New_York)</option>
|
||||||
|
<option value="America/Chicago">Central (America/Chicago)</option>
|
||||||
|
<option value="America/Denver">Mountain (America/Denver)</option>
|
||||||
|
<option value="America/Los_Angeles">Pacific (America/Los_Angeles)</option>
|
||||||
|
<option value="UTC">UTC</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="schedule-error" class="hidden text-sm text-red-600"></div>
|
||||||
|
|
||||||
|
<div class="flex justify-end gap-3 pt-2">
|
||||||
|
<button type="button" onclick="closeScheduleModal()"
|
||||||
|
class="px-6 py-2 border border-gray-300 dark:border-gray-600 rounded-lg text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-700">
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
<button type="submit"
|
||||||
|
class="px-6 py-2 bg-seismo-orange hover:bg-seismo-navy text-white rounded-lg font-medium">
|
||||||
|
Create Schedule
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- Assign Unit Modal -->
|
<!-- Assign Unit Modal -->
|
||||||
<div id="assign-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 z-50 flex items-center justify-center">
|
<div id="assign-modal" class="hidden fixed inset-0 bg-black bg-opacity-50 z-50 flex items-center justify-center">
|
||||||
<div class="bg-white dark:bg-slate-800 rounded-xl shadow-2xl w-full max-w-2xl max-h-[90vh] overflow-y-auto m-4">
|
<div class="bg-white dark:bg-slate-800 rounded-xl shadow-2xl w-full max-w-2xl max-h-[90vh] overflow-y-auto m-4">
|
||||||
@@ -809,11 +957,19 @@ function filterFiles() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Utility functions
|
function filterScheduledActions() {
|
||||||
function openScheduleModal() {
|
const filter = document.getElementById('schedules-filter').value;
|
||||||
alert('Schedule modal coming soon');
|
const url = filter === 'all'
|
||||||
|
? `/api/projects/${projectId}/schedules`
|
||||||
|
: `/api/projects/${projectId}/schedules?status=${filter}`;
|
||||||
|
|
||||||
|
htmx.ajax('GET', url, {
|
||||||
|
target: '#project-schedules',
|
||||||
|
swap: 'innerHTML'
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Utility functions
|
||||||
function exportProjectData() {
|
function exportProjectData() {
|
||||||
window.location.href = `/api/projects/${projectId}/export`;
|
window.location.href = `/api/projects/${projectId}/export`;
|
||||||
}
|
}
|
||||||
@@ -825,11 +981,239 @@ function archiveProject() {
|
|||||||
document.getElementById('project-settings-form').dispatchEvent(new Event('submit'));
|
document.getElementById('project-settings-form').dispatchEvent(new Event('submit'));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Schedule Modal Functions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
async function openScheduleModal() {
|
||||||
|
// Reset form
|
||||||
|
document.getElementById('schedule-name').value = '';
|
||||||
|
document.getElementById('schedule-locations-container').innerHTML = '<div class="text-gray-500 text-sm py-2 text-center">Loading locations...</div>';
|
||||||
|
document.getElementById('schedule-location-empty').classList.add('hidden');
|
||||||
|
document.getElementById('schedule-location-error').classList.add('hidden');
|
||||||
|
document.getElementById('schedule-error').classList.add('hidden');
|
||||||
|
|
||||||
|
// Reset to weekly calendar type
|
||||||
|
document.querySelector('input[name="schedule_type"][value="weekly_calendar"]').checked = true;
|
||||||
|
toggleScheduleType('weekly_calendar');
|
||||||
|
|
||||||
|
// Reset calendar checkboxes
|
||||||
|
if (typeof clearAllDays === 'function') {
|
||||||
|
clearAllDays();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Show modal
|
||||||
|
document.getElementById('schedule-modal').classList.remove('hidden');
|
||||||
|
|
||||||
|
// Load locations
|
||||||
|
await loadScheduleLocations();
|
||||||
|
}
|
||||||
|
|
||||||
|
function closeScheduleModal() {
|
||||||
|
document.getElementById('schedule-modal').classList.add('hidden');
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadScheduleLocations() {
|
||||||
|
const container = document.getElementById('schedule-locations-container');
|
||||||
|
const emptyMsg = document.getElementById('schedule-location-empty');
|
||||||
|
const errorMsg = document.getElementById('schedule-location-error');
|
||||||
|
|
||||||
|
// Reset state
|
||||||
|
emptyMsg.classList.add('hidden');
|
||||||
|
errorMsg.classList.add('hidden');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/projects/${projectId}/locations-json`);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error('Failed to load locations');
|
||||||
|
}
|
||||||
|
const locations = await response.json();
|
||||||
|
|
||||||
|
if (!locations.length) {
|
||||||
|
container.innerHTML = '<div class="text-gray-500 text-sm py-2 text-center">No locations available</div>';
|
||||||
|
emptyMsg.classList.remove('hidden');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build checkboxes for each location
|
||||||
|
container.innerHTML = locations.map(loc => `
|
||||||
|
<label class="flex items-center gap-3 p-2 rounded hover:bg-gray-50 dark:hover:bg-gray-600 cursor-pointer">
|
||||||
|
<input type="checkbox"
|
||||||
|
name="schedule_locations"
|
||||||
|
value="${loc.id}"
|
||||||
|
data-name="${loc.name}"
|
||||||
|
data-type="${loc.location_type}"
|
||||||
|
class="rounded text-seismo-orange focus:ring-seismo-orange">
|
||||||
|
<span class="text-sm text-gray-900 dark:text-white">${loc.name}</span>
|
||||||
|
<span class="text-xs text-gray-500 dark:text-gray-400">(${loc.location_type})</span>
|
||||||
|
</label>
|
||||||
|
`).join('');
|
||||||
|
|
||||||
|
// Add select all / clear all buttons if more than one location
|
||||||
|
if (locations.length > 1) {
|
||||||
|
container.insertAdjacentHTML('afterbegin', `
|
||||||
|
<div class="flex gap-2 pb-2 mb-2 border-b border-gray-200 dark:border-gray-600">
|
||||||
|
<button type="button" onclick="selectAllLocations()" class="text-xs text-seismo-orange hover:underline">Select All</button>
|
||||||
|
<span class="text-gray-400">|</span>
|
||||||
|
<button type="button" onclick="clearAllLocations()" class="text-xs text-gray-500 hover:underline">Clear All</button>
|
||||||
|
</div>
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to load locations:', err);
|
||||||
|
container.innerHTML = '<div class="text-red-500 text-sm py-2 text-center">Error loading locations</div>';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function selectAllLocations() {
|
||||||
|
document.querySelectorAll('input[name="schedule_locations"]').forEach(cb => cb.checked = true);
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearAllLocations() {
|
||||||
|
document.querySelectorAll('input[name="schedule_locations"]').forEach(cb => cb.checked = false);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSelectedLocationIds() {
|
||||||
|
const checkboxes = document.querySelectorAll('input[name="schedule_locations"]:checked');
|
||||||
|
return Array.from(checkboxes).map(cb => cb.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function toggleScheduleType(type) {
|
||||||
|
const weeklyEditor = document.getElementById('schedule-weekly-wrapper');
|
||||||
|
const intervalEditor = document.getElementById('schedule-interval-wrapper');
|
||||||
|
|
||||||
|
if (type === 'weekly_calendar') {
|
||||||
|
weeklyEditor.classList.remove('hidden');
|
||||||
|
intervalEditor.classList.add('hidden');
|
||||||
|
} else {
|
||||||
|
weeklyEditor.classList.add('hidden');
|
||||||
|
intervalEditor.classList.remove('hidden');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Schedule form submission
|
||||||
|
document.getElementById('schedule-form').addEventListener('submit', async function(e) {
|
||||||
|
e.preventDefault();
|
||||||
|
|
||||||
|
const name = document.getElementById('schedule-name').value.trim();
|
||||||
|
const locationIds = getSelectedLocationIds();
|
||||||
|
const scheduleType = document.querySelector('input[name="schedule_type"]:checked').value;
|
||||||
|
const timezone = document.getElementById('schedule-timezone').value;
|
||||||
|
|
||||||
|
// Hide previous errors
|
||||||
|
document.getElementById('schedule-location-error').classList.add('hidden');
|
||||||
|
|
||||||
|
if (!name) {
|
||||||
|
showScheduleError('Please enter a schedule name.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!locationIds.length) {
|
||||||
|
document.getElementById('schedule-location-error').classList.remove('hidden');
|
||||||
|
showScheduleError('Please select at least one location.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build payload based on schedule type
|
||||||
|
const payload = {
|
||||||
|
name: name,
|
||||||
|
location_ids: locationIds, // Array of location IDs
|
||||||
|
schedule_type: scheduleType,
|
||||||
|
timezone: timezone,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (scheduleType === 'weekly_calendar') {
|
||||||
|
// Get weekly pattern from the calendar editor
|
||||||
|
if (typeof getWeeklyPatternData === 'function') {
|
||||||
|
payload.weekly_pattern = getWeeklyPatternData();
|
||||||
|
} else {
|
||||||
|
showScheduleError('Calendar editor not loaded properly.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate at least one day is selected
|
||||||
|
const hasEnabledDay = Object.values(payload.weekly_pattern).some(day => day.enabled);
|
||||||
|
if (!hasEnabledDay) {
|
||||||
|
showScheduleError('Please select at least one day for monitoring.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get auto-increment setting for calendar mode
|
||||||
|
if (typeof getCalendarAutoIncrement === 'function') {
|
||||||
|
payload.auto_increment_index = getCalendarAutoIncrement();
|
||||||
|
} else {
|
||||||
|
payload.auto_increment_index = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get include_download setting for calendar mode
|
||||||
|
if (typeof getCalendarIncludeDownload === 'function') {
|
||||||
|
payload.include_download = getCalendarIncludeDownload();
|
||||||
|
} else {
|
||||||
|
payload.include_download = true;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Get interval data
|
||||||
|
if (typeof getIntervalData === 'function') {
|
||||||
|
const intervalData = getIntervalData();
|
||||||
|
payload.interval_type = intervalData.interval_type;
|
||||||
|
payload.cycle_time = intervalData.cycle_time;
|
||||||
|
payload.include_download = intervalData.include_download;
|
||||||
|
payload.auto_increment_index = intervalData.auto_increment_index;
|
||||||
|
} else {
|
||||||
|
showScheduleError('Interval editor not loaded properly.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/projects/${projectId}/recurring-schedules/`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(payload)
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const data = await response.json().catch(() => ({}));
|
||||||
|
throw new Error(data.detail || 'Failed to create schedule');
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
// Close modal and refresh schedules list
|
||||||
|
closeScheduleModal();
|
||||||
|
|
||||||
|
// Refresh both the recurring schedules list and scheduled actions
|
||||||
|
htmx.ajax('GET', `/api/projects/${projectId}/recurring-schedules/partials/list`, {
|
||||||
|
target: '#recurring-schedule-list',
|
||||||
|
swap: 'innerHTML'
|
||||||
|
});
|
||||||
|
htmx.ajax('GET', `/api/projects/${projectId}/schedules?status=pending`, {
|
||||||
|
target: '#project-schedules',
|
||||||
|
swap: 'innerHTML'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Show success message
|
||||||
|
console.log('Schedule(s) created:', result.message);
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
showScheduleError(err.message || 'Failed to create schedule.');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
function showScheduleError(message) {
|
||||||
|
const errorEl = document.getElementById('schedule-error');
|
||||||
|
errorEl.textContent = message;
|
||||||
|
errorEl.classList.remove('hidden');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Keyboard shortcuts
|
// Keyboard shortcuts
|
||||||
|
// ============================================================================
|
||||||
document.addEventListener('keydown', function(e) {
|
document.addEventListener('keydown', function(e) {
|
||||||
if (e.key === 'Escape') {
|
if (e.key === 'Escape') {
|
||||||
closeLocationModal();
|
closeLocationModal();
|
||||||
closeAssignModal();
|
closeAssignModal();
|
||||||
|
closeScheduleModal();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -846,6 +1230,12 @@ document.getElementById('assign-modal')?.addEventListener('click', function(e) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
document.getElementById('schedule-modal')?.addEventListener('click', function(e) {
|
||||||
|
if (e.target === this) {
|
||||||
|
closeScheduleModal();
|
||||||
|
}
|
||||||
|
});
// Load project details on page load
document.addEventListener('DOMContentLoaded', function() {
    loadProjectDetails();
Block a user