Feat: Scheduler implemented, WIP
This commit is contained in:
327
backend/routers/alerts.py
Normal file
327
backend/routers/alerts.py
Normal file
@@ -0,0 +1,327 @@
|
||||
"""
|
||||
Alerts Router
|
||||
|
||||
API endpoints for managing in-app alerts.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, Query
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from fastapi.responses import HTMLResponse, JSONResponse
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import Optional
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from backend.database import get_db
|
||||
from backend.models import Alert, RosterUnit
|
||||
from backend.services.alert_service import get_alert_service
|
||||
|
||||
router = APIRouter(prefix="/api/alerts", tags=["alerts"])
|
||||
templates = Jinja2Templates(directory="templates")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Alert List and Count
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/")
async def list_alerts(
    db: Session = Depends(get_db),
    status: Optional[str] = Query(None, description="Filter by status: active, acknowledged, resolved, dismissed"),
    project_id: Optional[str] = Query(None),
    unit_id: Optional[str] = Query(None),
    alert_type: Optional[str] = Query(None, description="Filter by type: device_offline, device_online, schedule_failed"),
    limit: int = Query(50, le=100),
    offset: int = Query(0, ge=0),
):
    """List alerts with optional status/project/unit/type filters and paging."""
    service = get_alert_service(db)
    matches = service.get_all_alerts(
        status=status,
        project_id=project_id,
        unit_id=unit_id,
        alert_type=alert_type,
        limit=limit,
        offset=offset,
    )

    def serialize(alert):
        # Timestamps are nullable in the DB; emit None instead of crashing.
        return {
            "id": alert.id,
            "alert_type": alert.alert_type,
            "severity": alert.severity,
            "title": alert.title,
            "message": alert.message,
            "status": alert.status,
            "unit_id": alert.unit_id,
            "project_id": alert.project_id,
            "location_id": alert.location_id,
            "created_at": alert.created_at.isoformat() if alert.created_at else None,
            "acknowledged_at": alert.acknowledged_at.isoformat() if alert.acknowledged_at else None,
            "resolved_at": alert.resolved_at.isoformat() if alert.resolved_at else None,
        }

    return {
        "alerts": [serialize(a) for a in matches],
        "count": len(matches),
        "limit": limit,
        "offset": offset,
    }
|
||||
|
||||
|
||||
@router.get("/active")
async def list_active_alerts(
    db: Session = Depends(get_db),
    project_id: Optional[str] = Query(None),
    unit_id: Optional[str] = Query(None),
    alert_type: Optional[str] = Query(None),
    min_severity: Optional[str] = Query(None, description="Minimum severity: info, warning, critical"),
    limit: int = Query(50, le=100),
):
    """List only active alerts."""
    active = get_alert_service(db).get_active_alerts(
        project_id=project_id,
        unit_id=unit_id,
        alert_type=alert_type,
        min_severity=min_severity,
        limit=limit,
    )

    payload = []
    for item in active:
        payload.append({
            "id": item.id,
            "alert_type": item.alert_type,
            "severity": item.severity,
            "title": item.title,
            "message": item.message,
            "unit_id": item.unit_id,
            "project_id": item.project_id,
            # created_at is nullable; serialize to ISO-8601 or None.
            "created_at": item.created_at.isoformat() if item.created_at else None,
        })

    return {"alerts": payload, "count": len(payload)}
|
||||
|
||||
|
||||
@router.get("/active/count")
async def get_active_alert_count(db: Session = Depends(get_db)):
    """Get count of active alerts (for navbar badge)."""
    return {"count": get_alert_service(db).get_active_alert_count()}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Single Alert Operations
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/{alert_id}")
async def get_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """Return full detail for one alert, including its linked roster unit if any."""
    alert = db.query(Alert).filter_by(id=alert_id).first()
    if alert is None:
        raise HTTPException(status_code=404, detail="Alert not found")

    # NOTE(review): "unit_name" below is populated from unit.id, not a name
    # column — presumably roster units are keyed by a human-readable id;
    # confirm against the RosterUnit model.
    unit = db.query(RosterUnit).filter_by(id=alert.unit_id).first() if alert.unit_id else None

    def stamp(value):
        # Optional datetime -> ISO-8601 string or None.
        return value.isoformat() if value else None

    return {
        "id": alert.id,
        "alert_type": alert.alert_type,
        "severity": alert.severity,
        "title": alert.title,
        "message": alert.message,
        "metadata": alert.alert_metadata,
        "status": alert.status,
        "unit_id": alert.unit_id,
        "unit_name": unit.id if unit else None,
        "project_id": alert.project_id,
        "location_id": alert.location_id,
        "schedule_id": alert.schedule_id,
        "created_at": stamp(alert.created_at),
        "acknowledged_at": stamp(alert.acknowledged_at),
        "resolved_at": stamp(alert.resolved_at),
        "expires_at": stamp(alert.expires_at),
    }
|
||||
|
||||
|
||||
@router.post("/{alert_id}/acknowledge")
async def acknowledge_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """Mark alert as acknowledged."""
    updated = get_alert_service(db).acknowledge_alert(alert_id)
    if updated is None:
        raise HTTPException(status_code=404, detail="Alert not found")
    return {"success": True, "alert_id": updated.id, "status": updated.status}
|
||||
|
||||
|
||||
@router.post("/{alert_id}/dismiss")
async def dismiss_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """Dismiss alert."""
    updated = get_alert_service(db).dismiss_alert(alert_id)
    if updated is None:
        raise HTTPException(status_code=404, detail="Alert not found")
    return {"success": True, "alert_id": updated.id, "status": updated.status}
|
||||
|
||||
|
||||
@router.post("/{alert_id}/resolve")
async def resolve_alert(
    alert_id: str,
    db: Session = Depends(get_db),
):
    """Manually resolve an alert."""
    updated = get_alert_service(db).resolve_alert(alert_id)
    if updated is None:
        raise HTTPException(status_code=404, detail="Alert not found")
    return {"success": True, "alert_id": updated.id, "status": updated.status}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# HTML Partials for HTMX
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/partials/dropdown", response_class=HTMLResponse)
async def get_alert_dropdown(
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Return HTML partial for alert dropdown in navbar.

    Renders the 10 most recent active alerts with a human-readable age label
    plus the total active count for the badge.
    """
    alert_service = get_alert_service(db)
    alerts = alert_service.get_active_alerts(limit=10)

    # Naive UTC to match how created_at appears to be stored; switching to
    # timezone-aware datetimes here would break the subtraction against naive
    # DB values — TODO confirm storage convention before modernizing.
    now = datetime.utcnow()

    def time_ago(created_at):
        # Fix: created_at is nullable (the JSON endpoints guard it the same
        # way); the original crashed with TypeError on a NULL timestamp.
        if created_at is None:
            return "unknown"
        delta = now - created_at
        if delta.days > 0:
            return f"{delta.days}d ago"
        if delta.seconds >= 3600:
            return f"{delta.seconds // 3600}h ago"
        if delta.seconds >= 60:
            return f"{delta.seconds // 60}m ago"
        return "just now"

    alerts_data = [{"alert": a, "time_ago": time_ago(a.created_at)} for a in alerts]

    return templates.TemplateResponse("partials/alerts/alert_dropdown.html", {
        "request": request,
        "alerts": alerts_data,
        "total_count": alert_service.get_active_alert_count(),
    })
|
||||
|
||||
|
||||
@router.get("/partials/list", response_class=HTMLResponse)
async def get_alert_list(
    request: Request,
    db: Session = Depends(get_db),
    status: Optional[str] = Query(None),
    limit: int = Query(20),
):
    """
    Return HTML partial for alert list page.

    Each alert is paired with a human-readable age label for the template.
    """
    alert_service = get_alert_service(db)

    # status=None means "no filter" in the service (list_alerts passes it the
    # same way), so the original if/else around this call was redundant.
    alerts = alert_service.get_all_alerts(status=status, limit=limit)

    # Naive UTC to match how created_at appears to be stored — TODO confirm.
    now = datetime.utcnow()

    def time_ago(created_at):
        # Fix: created_at is nullable (the JSON endpoints guard it the same
        # way); the original crashed with TypeError on a NULL timestamp.
        if created_at is None:
            return "unknown"
        delta = now - created_at
        if delta.days > 0:
            return f"{delta.days}d ago"
        if delta.seconds >= 3600:
            return f"{delta.seconds // 3600}h ago"
        if delta.seconds >= 60:
            return f"{delta.seconds // 60}m ago"
        return "just now"

    alerts_data = [{"alert": a, "time_ago": time_ago(a.created_at)} for a in alerts]

    return templates.TemplateResponse("partials/alerts/alert_list.html", {
        "request": request,
        "alerts": alerts_data,
        "status_filter": status,
    })
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Cleanup
|
||||
# ============================================================================
|
||||
|
||||
@router.post("/cleanup-expired")
async def cleanup_expired_alerts(db: Session = Depends(get_db)):
    """Cleanup expired alerts (admin/maintenance endpoint)."""
    removed = get_alert_service(db).cleanup_expired_alerts()
    return {"success": True, "cleaned_up": removed}
|
||||
@@ -90,6 +90,40 @@ async def get_project_locations(
|
||||
})
|
||||
|
||||
|
||||
@router.get("/locations-json")
async def get_project_locations_json(
    project_id: str,
    db: Session = Depends(get_db),
    location_type: Optional[str] = Query(None),
):
    """
    Get all monitoring locations for a project as JSON.
    Used by the schedule modal to populate location dropdown.
    """
    if db.query(Project).filter_by(id=project_id).first() is None:
        raise HTTPException(status_code=404, detail="Project not found")

    query = db.query(MonitoringLocation).filter_by(project_id=project_id)
    if location_type:
        query = query.filter_by(location_type=location_type)

    return [
        {
            "id": loc.id,
            "name": loc.name,
            "location_type": loc.location_type,
            "description": loc.description,
            "address": loc.address,
            "coordinates": loc.coordinates,
        }
        for loc in query.order_by(MonitoringLocation.name).all()
    ]
|
||||
|
||||
|
||||
@router.post("/locations/create")
|
||||
async def create_location(
|
||||
project_id: str,
|
||||
|
||||
@@ -28,6 +28,7 @@ from backend.models import (
|
||||
UnitAssignment,
|
||||
RecordingSession,
|
||||
ScheduledAction,
|
||||
RecurringSchedule,
|
||||
RosterUnit,
|
||||
)
|
||||
|
||||
|
||||
458
backend/routers/recurring_schedules.py
Normal file
458
backend/routers/recurring_schedules.py
Normal file
@@ -0,0 +1,458 @@
|
||||
"""
|
||||
Recurring Schedules Router
|
||||
|
||||
API endpoints for managing recurring monitoring schedules.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, Query
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from fastapi.responses import HTMLResponse, JSONResponse
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
import json
|
||||
|
||||
from backend.database import get_db
|
||||
from backend.models import RecurringSchedule, MonitoringLocation, Project, RosterUnit
|
||||
from backend.services.recurring_schedule_service import get_recurring_schedule_service
|
||||
|
||||
router = APIRouter(prefix="/api/projects/{project_id}/recurring-schedules", tags=["recurring-schedules"])
|
||||
templates = Jinja2Templates(directory="templates")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# List and Get
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/")
async def list_recurring_schedules(
    project_id: str,
    db: Session = Depends(get_db),
    enabled_only: bool = Query(False),
):
    """List all recurring schedules for a project, newest first."""
    if db.query(Project).filter_by(id=project_id).first() is None:
        raise HTTPException(status_code=404, detail="Project not found")

    query = db.query(RecurringSchedule).filter_by(project_id=project_id)
    if enabled_only:
        query = query.filter_by(enabled=True)
    schedules = query.order_by(RecurringSchedule.created_at.desc()).all()

    def serialize(s):
        return {
            "id": s.id,
            "name": s.name,
            "schedule_type": s.schedule_type,
            "device_type": s.device_type,
            "location_id": s.location_id,
            "unit_id": s.unit_id,
            "enabled": s.enabled,
            # weekly_pattern is persisted as a JSON string column.
            "weekly_pattern": json.loads(s.weekly_pattern) if s.weekly_pattern else None,
            "interval_type": s.interval_type,
            "cycle_time": s.cycle_time,
            "include_download": s.include_download,
            "timezone": s.timezone,
            "next_occurrence": s.next_occurrence.isoformat() if s.next_occurrence else None,
            "last_generated_at": s.last_generated_at.isoformat() if s.last_generated_at else None,
            "created_at": s.created_at.isoformat() if s.created_at else None,
        }

    return {
        "schedules": [serialize(s) for s in schedules],
        "count": len(schedules),
    }
|
||||
|
||||
|
||||
@router.get("/{schedule_id}")
async def get_recurring_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """Get a specific recurring schedule with its location and unit resolved."""
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()
    if schedule is None:
        raise HTTPException(status_code=404, detail="Schedule not found")

    location = db.query(MonitoringLocation).filter_by(id=schedule.location_id).first()
    # NOTE(review): "unit_name" is populated from unit.id, not a name column —
    # presumably roster units are keyed by a readable id; confirm.
    unit = db.query(RosterUnit).filter_by(id=schedule.unit_id).first() if schedule.unit_id else None

    def stamp(value):
        # Optional datetime -> ISO-8601 string or None.
        return value.isoformat() if value else None

    return {
        "id": schedule.id,
        "name": schedule.name,
        "schedule_type": schedule.schedule_type,
        "device_type": schedule.device_type,
        "location_id": schedule.location_id,
        "location_name": location.name if location else None,
        "unit_id": schedule.unit_id,
        "unit_name": unit.id if unit else None,
        "enabled": schedule.enabled,
        # weekly_pattern is persisted as a JSON string column.
        "weekly_pattern": json.loads(schedule.weekly_pattern) if schedule.weekly_pattern else None,
        "interval_type": schedule.interval_type,
        "cycle_time": schedule.cycle_time,
        "include_download": schedule.include_download,
        "timezone": schedule.timezone,
        "next_occurrence": stamp(schedule.next_occurrence),
        "last_generated_at": stamp(schedule.last_generated_at),
        "created_at": stamp(schedule.created_at),
        "updated_at": stamp(schedule.updated_at),
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Create
|
||||
# ============================================================================
|
||||
|
||||
@router.post("/")
async def create_recurring_schedule(
    project_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Create recurring schedules for one or more locations.

    Body for weekly_calendar (supports multiple locations):
    {
        "name": "Weeknight Monitoring",
        "schedule_type": "weekly_calendar",
        "location_ids": ["uuid1", "uuid2"],
        "weekly_pattern": {
            "monday": {"enabled": true, "start": "19:00", "end": "07:00"},
            "tuesday": {"enabled": false},
            ...
        },
        "include_download": true,
        "auto_increment_index": true,
        "timezone": "America/New_York"
    }

    Body for simple_interval (supports multiple locations):
    {
        "name": "24/7 Continuous",
        "schedule_type": "simple_interval",
        "location_ids": ["uuid1", "uuid2"],
        "interval_type": "daily",
        "cycle_time": "00:00",
        "include_download": true,
        "auto_increment_index": true,
        "timezone": "America/New_York"
    }

    Legacy single location support (backwards compatible):
    {"name": "...", "location_id": "uuid", ...}

    Returns a JSON payload listing every schedule created.
    """
    project = db.query(Project).filter_by(id=project_id).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    data = await request.json()

    # Accept both location_ids (array) and legacy location_id (single value).
    location_ids = data.get("location_ids", [])
    if not location_ids and data.get("location_id"):
        location_ids = [data.get("location_id")]

    if not location_ids:
        raise HTTPException(status_code=400, detail="At least one location is required")

    # Fix: de-duplicate while preserving order. Repeated ids in the request
    # would otherwise make the existence check below report a spurious 404
    # (the IN query returns each location once).
    location_ids = list(dict.fromkeys(location_ids))

    # Validate every requested location exists and belongs to this project.
    locations = db.query(MonitoringLocation).filter(
        MonitoringLocation.id.in_(location_ids),
        MonitoringLocation.project_id == project_id,
    ).all()

    if len(locations) != len(location_ids):
        raise HTTPException(status_code=404, detail="One or more locations not found")

    service = get_recurring_schedule_service(db)
    created_schedules = []
    base_name = data.get("name", "Unnamed Schedule")

    # Create one schedule per location.
    for location in locations:
        # Device type follows the location type: sound -> SLM, else seismograph.
        device_type = "slm" if location.location_type == "sound" else "seismograph"

        # Disambiguate names when one request spans several locations.
        schedule_name = f"{base_name} - {location.name}" if len(locations) > 1 else base_name

        schedule = service.create_schedule(
            project_id=project_id,
            location_id=location.id,
            name=schedule_name,
            schedule_type=data.get("schedule_type", "weekly_calendar"),
            device_type=device_type,
            unit_id=data.get("unit_id"),
            weekly_pattern=data.get("weekly_pattern"),
            interval_type=data.get("interval_type"),
            cycle_time=data.get("cycle_time"),
            include_download=data.get("include_download", True),
            auto_increment_index=data.get("auto_increment_index", True),
            timezone=data.get("timezone", "America/New_York"),
        )
        created_schedules.append({
            "schedule_id": schedule.id,
            "location_id": location.id,
            "location_name": location.name,
        })

    return JSONResponse({
        "success": True,
        "schedules": created_schedules,
        "count": len(created_schedules),
        "message": f"Created {len(created_schedules)} recurring schedule(s)",
    })
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Update
|
||||
# ============================================================================
|
||||
|
||||
@router.put("/{schedule_id}")
async def update_recurring_schedule(
    project_id: str,
    schedule_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """Update a recurring schedule."""
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()
    if schedule is None:
        raise HTTPException(status_code=404, detail="Schedule not found")

    data = await request.json()

    # Only whitelisted fields may be patched; other keys in the body are ignored.
    allowed = ("name", "weekly_pattern", "interval_type", "cycle_time",
               "include_download", "auto_increment_index", "timezone", "unit_id")
    update_kwargs = {key: data[key] for key in allowed if key in data}

    updated = get_recurring_schedule_service(db).update_schedule(schedule_id, **update_kwargs)

    return {
        "success": True,
        "schedule_id": updated.id,
        "message": "Schedule updated successfully",
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Delete
|
||||
# ============================================================================
|
||||
|
||||
@router.delete("/{schedule_id}")
async def delete_recurring_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """Delete a recurring schedule."""
    if not get_recurring_schedule_service(db).delete_schedule(schedule_id):
        raise HTTPException(status_code=404, detail="Schedule not found")
    return {"success": True, "message": "Schedule deleted successfully"}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Enable/Disable
|
||||
# ============================================================================
|
||||
|
||||
@router.post("/{schedule_id}/enable")
async def enable_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """Enable a disabled schedule."""
    updated = get_recurring_schedule_service(db).enable_schedule(schedule_id)
    if updated is None:
        raise HTTPException(status_code=404, detail="Schedule not found")
    return {
        "success": True,
        "schedule_id": updated.id,
        "enabled": updated.enabled,
        "message": "Schedule enabled",
    }
|
||||
|
||||
|
||||
@router.post("/{schedule_id}/disable")
async def disable_schedule(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
):
    """Disable a schedule."""
    updated = get_recurring_schedule_service(db).disable_schedule(schedule_id)
    if updated is None:
        raise HTTPException(status_code=404, detail="Schedule not found")
    return {
        "success": True,
        "schedule_id": updated.id,
        "enabled": updated.enabled,
        "message": "Schedule disabled",
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Preview Generated Actions
|
||||
# ============================================================================
|
||||
|
||||
@router.post("/{schedule_id}/generate-preview")
async def preview_generated_actions(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
    days: int = Query(7, ge=1, le=30),
):
    """Preview what actions would be generated without saving them."""
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()
    if schedule is None:
        raise HTTPException(status_code=404, detail="Schedule not found")

    planned = get_recurring_schedule_service(db).generate_actions_for_schedule(
        schedule,
        horizon_days=days,
        preview_only=True,  # dry run: nothing is persisted
    )

    return {
        "schedule_id": schedule_id,
        "schedule_name": schedule.name,
        "preview_days": days,
        "actions": [
            {
                "action_type": action.action_type,
                "scheduled_time": action.scheduled_time.isoformat(),
                "notes": action.notes,
            }
            for action in planned
        ],
        "action_count": len(planned),
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Manual Generation Trigger
|
||||
# ============================================================================
|
||||
|
||||
@router.post("/{schedule_id}/generate")
async def generate_actions_now(
    project_id: str,
    schedule_id: str,
    db: Session = Depends(get_db),
    days: int = Query(7, ge=1, le=30),
):
    """Manually trigger action generation for a schedule."""
    schedule = db.query(RecurringSchedule).filter_by(
        id=schedule_id,
        project_id=project_id,
    ).first()
    if schedule is None:
        raise HTTPException(status_code=404, detail="Schedule not found")
    if not schedule.enabled:
        # Disabled schedules must never produce actions, even on manual trigger.
        raise HTTPException(status_code=400, detail="Schedule is disabled")

    generated = get_recurring_schedule_service(db).generate_actions_for_schedule(
        schedule,
        horizon_days=days,
        preview_only=False,
    )

    return {
        "success": True,
        "schedule_id": schedule_id,
        "generated_count": len(generated),
        "message": f"Generated {len(generated)} scheduled actions",
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# HTML Partials
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/partials/list", response_class=HTMLResponse)
async def get_schedule_list_partial(
    project_id: str,
    request: Request,
    db: Session = Depends(get_db),
):
    """
    Return HTML partial for schedule list.

    Each schedule is paired with its MonitoringLocation (or None if missing)
    and its parsed weekly pattern for the template.
    """
    schedules = db.query(RecurringSchedule).filter_by(
        project_id=project_id
    ).order_by(RecurringSchedule.created_at.desc()).all()

    # Fix: the original issued one location query per schedule (N+1);
    # fetch every referenced location in a single IN query instead.
    location_ids = {s.location_id for s in schedules if s.location_id}
    locations_by_id = {}
    if location_ids:
        locations_by_id = {
            loc.id: loc
            for loc in db.query(MonitoringLocation).filter(
                MonitoringLocation.id.in_(location_ids)
            ).all()
        }

    schedule_data = [
        {
            "schedule": s,
            "location": locations_by_id.get(s.location_id),
            # weekly_pattern is persisted as a JSON string column.
            "pattern": json.loads(s.weekly_pattern) if s.weekly_pattern else None,
        }
        for s in schedules
    ]

    return templates.TemplateResponse("partials/projects/recurring_schedule_list.html", {
        "request": request,
        "project_id": project_id,
        "schedules": schedule_data,
    })
|
||||
Reference in New Issue
Block a user