Massive update

This commit is contained in:
Logan
2026-04-11 13:44:08 -04:00
parent fd6c2fd8bf
commit 3b3a136d04
31 changed files with 1919 additions and 94 deletions
+74
View File
@@ -0,0 +1,74 @@
import uuid
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, HTTPException, Depends
from app.models import AlertRule, AlertRuleUpdate
from app.internal import firestore as fstore
from app.internal.auth import require_admin_token
router = APIRouter(tags=["alerts"])
# ---------------------------------------------------------------------------
# Alert events (triggered alerts)
# ---------------------------------------------------------------------------
@router.get("/alerts")
async def list_alerts(acknowledged: Optional[bool] = None):
    """List triggered alert events, optionally filtered by acknowledged state."""
    criteria = {} if acknowledged is None else {"acknowledged": acknowledged}
    return await fstore.collection_list("alert_events", **criteria)
@router.post("/alerts/{alert_id}/acknowledge")
async def acknowledge_alert(alert_id: str):
    """Mark a triggered alert event as acknowledged; 404 if it doesn't exist."""
    existing = await fstore.doc_get("alert_events", alert_id)
    if not existing:
        raise HTTPException(404, f"Alert '{alert_id}' not found.")
    await fstore.doc_update("alert_events", alert_id, {"acknowledged": True})
    return {"ok": True}
# ---------------------------------------------------------------------------
# Alert rules
# ---------------------------------------------------------------------------
@router.get("/alert-rules")
async def list_alert_rules():
    """Return every configured alert rule."""
    rules = await fstore.collection_list("alert_rules")
    return rules
@router.post("/alert-rules")
async def create_alert_rule(body: AlertRule, _: dict = Depends(require_admin_token)):
    """Create an alert rule (admin only), persisted under a fresh UUID."""
    new_id = str(uuid.uuid4())
    created = datetime.now(timezone.utc).isoformat()
    record = {
        "rule_id": new_id,
        "name": body.name,
        "keywords": body.keywords,
        "talkgroup_ids": body.talkgroup_ids,
        "enabled": body.enabled,
        "discord_webhook": body.discord_webhook,
        "created_at": created,
    }
    # merge=False: write the document whole (a UUID collision would overwrite it).
    await fstore.doc_set("alert_rules", new_id, record, merge=False)
    return record
@router.put("/alert-rules/{rule_id}")
async def update_alert_rule(rule_id: str, body: AlertRuleUpdate, _: dict = Depends(require_admin_token)):
    """Partially update an alert rule (admin only); 404 if it doesn't exist.

    Only fields set to a non-None value on the payload are written, so this
    endpoint cannot clear a field back to null.
    """
    existing = await fstore.doc_get("alert_rules", rule_id)
    if not existing:
        raise HTTPException(404, f"Alert rule '{rule_id}' not found.")
    changes = body.model_dump(exclude_none=True)
    await fstore.doc_update("alert_rules", rule_id, changes)
    merged = dict(existing)
    merged.update(changes)
    return merged
@router.delete("/alert-rules/{rule_id}")
async def delete_alert_rule(rule_id: str, _: dict = Depends(require_admin_token)):
    """Delete an alert rule (admin only); 404 if it doesn't exist."""
    existing = await fstore.doc_get("alert_rules", rule_id)
    if not existing:
        raise HTTPException(404, f"Alert rule '{rule_id}' not found.")
    await fstore.doc_delete("alert_rules", rule_id)
    return {"ok": True}
+83
View File
@@ -0,0 +1,83 @@
import uuid
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, HTTPException, Depends
from app.models import IncidentCreate, IncidentUpdate
from app.internal import firestore as fstore
from app.internal.auth import require_admin_token
router = APIRouter(prefix="/incidents", tags=["incidents"])
@router.get("")
async def list_incidents(status: Optional[str] = None, type: Optional[str] = None):
    """List incidents, optionally filtered by status and/or type.

    `type` shadows the builtin, but the name is part of the public query
    string, so it must stay.
    """
    criteria = {
        field: value
        for field, value in (("status", status), ("type", type))
        if value
    }
    return await fstore.collection_list("incidents", **criteria)
@router.get("/{incident_id}")
async def get_incident(incident_id: str):
    """Fetch a single incident by id; 404 if it doesn't exist."""
    incident = await fstore.doc_get("incidents", incident_id)
    if not incident:
        raise HTTPException(404, f"Incident '{incident_id}' not found.")
    return incident
@router.post("")
async def create_incident(body: IncidentCreate, _: dict = Depends(require_admin_token)):
    """Create an incident (admin only).

    Both started_at and updated_at are stamped with the same UTC
    ISO-8601 instant; the id is a fresh UUID4.
    """
    stamp = datetime.now(timezone.utc).isoformat()
    new_id = str(uuid.uuid4())
    record = {
        "incident_id": new_id,
        "title": body.title,
        "type": body.type,
        "status": body.status,
        "location": body.location,
        "call_ids": body.call_ids,
        "summary": body.summary,
        "tags": body.tags,
        "started_at": stamp,
        "updated_at": stamp,
    }
    # merge=False: write the document whole (a UUID collision would overwrite it).
    await fstore.doc_set("incidents", new_id, record, merge=False)
    return record
@router.put("/{incident_id}")
async def update_incident(incident_id: str, body: IncidentUpdate, _: dict = Depends(require_admin_token)):
    """Partially update an incident (admin only); always bumps updated_at.

    Only non-None payload fields are written, so fields cannot be cleared
    back to null through this endpoint.
    """
    existing = await fstore.doc_get("incidents", incident_id)
    if not existing:
        raise HTTPException(404, f"Incident '{incident_id}' not found.")
    changes = body.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.now(timezone.utc).isoformat()
    await fstore.doc_update("incidents", incident_id, changes)
    merged = dict(existing)
    merged.update(changes)
    return merged
@router.delete("/{incident_id}")
async def delete_incident(incident_id: str, _: dict = Depends(require_admin_token)):
    """Delete an incident (admin only); 404 if it doesn't exist."""
    existing = await fstore.doc_get("incidents", incident_id)
    if not existing:
        raise HTTPException(404, f"Incident '{incident_id}' not found.")
    await fstore.doc_delete("incidents", incident_id)
    return {"ok": True}
@router.post("/{incident_id}/calls/{call_id}")
async def link_call_to_incident(incident_id: str, call_id: str, _: dict = Depends(require_admin_token)):
    """Attach a call to an incident (admin only).

    Idempotent on the incident side: re-linking an already-attached call
    leaves call_ids untouched, but the call document's back-reference is
    refreshed unconditionally.
    NOTE(review): call_ids is read-modify-written here, so two concurrent
    links to the same incident could drop one — confirm whether fstore
    exposes an atomic array-union.
    """
    incident = await fstore.doc_get("incidents", incident_id)
    if not incident:
        raise HTTPException(404, f"Incident '{incident_id}' not found.")
    linked = incident.get("call_ids", [])
    if call_id not in linked:
        linked.append(call_id)
        await fstore.doc_update("incidents", incident_id, {
            "call_ids": linked,
            "updated_at": datetime.now(timezone.utc).isoformat(),
        })
    await fstore.doc_update("calls", call_id, {"incident_id": incident_id})
    return {"ok": True}
+13
View File
@@ -66,6 +66,19 @@ async def send_command(node_id: str, cmd: CommandPayload):
return {"ok": True}
@router.post("/{node_id}/reissue-key")
async def reissue_node_key(node_id: str, _: dict = Depends(require_admin_token)):
    """Rotate the node's API key (admin only).

    Generates a fresh 64-hex-char key, overwrites the stored credential
    document, and pushes the key to the node over MQTT (retained message).
    Use this to rotate a key or recover a node whose key was lost.
    """
    target = await fstore.doc_get("nodes", node_id)
    if not target:
        raise HTTPException(404, f"Node '{node_id}' not found.")
    fresh_key = secrets.token_hex(32)
    credential = {"node_id": node_id, "api_key": fresh_key}
    # merge=False so any stale fields on the old credential doc are dropped.
    await fstore.doc_set("node_keys", node_id, credential, merge=False)
    mqtt_handler.publish_node_key(node_id, fresh_key)
    return {"ok": True}
@router.post("/{node_id}/config/{system_id}")
async def assign_system(node_id: str, system_id: str):
"""
+92 -3
View File
@@ -1,5 +1,5 @@
from typing import Optional
from fastapi import APIRouter, UploadFile, File, Form, HTTPException, Security
from fastapi import APIRouter, BackgroundTasks, UploadFile, File, Form, HTTPException, Security
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from app.internal.storage import upload_audio
from app.internal import firestore as fstore
@@ -12,20 +12,32 @@ _bearer = HTTPBearer(auto_error=False)
@router.post("/upload")
async def upload_call_audio(
background_tasks: BackgroundTasks,
file: UploadFile = File(...),
call_id: str = Form(...),
node_id: str = Form(...),
talkgroup_id: Optional[int] = Form(None),
talkgroup_name: Optional[str] = Form(None),
system_id: Optional[str] = Form(None),
credentials: Optional[HTTPAuthorizationCredentials] = Security(_bearer),
):
"""
Receive an audio recording from an edge node.
Upload to GCS, update the call document in Firestore with the audio URL.
Upload to GCS, update the call document in Firestore with the audio URL,
then kick off the intelligence pipeline as a background task.
"""
# Verify the per-node API key
if not credentials:
raise HTTPException(401, "Missing authorization")
key_doc = await fstore.doc_get("node_keys", node_id)
if not key_doc or key_doc.get("api_key") != credentials.credentials:
if not key_doc:
logger.warning(f"Upload 401: no key_doc in Firestore for node_id={node_id!r}")
raise HTTPException(401, "Invalid node API key")
if key_doc.get("api_key") != credentials.credentials:
logger.warning(
f"Upload 401: key mismatch for node_id={node_id!r} "
f"(received prefix: {credentials.credentials[:8]}...)"
)
raise HTTPException(401, "Invalid node API key")
data = await file.read()
@@ -41,4 +53,81 @@ async def upload_call_audio(
except Exception as e:
logger.warning(f"Could not update call {call_id} with audio_url: {e}")
# Convert public GCS URL to gs:// URI for Speech-to-Text
gcs_uri = _public_url_to_gcs_uri(audio_url)
background_tasks.add_task(
_run_intelligence_pipeline,
call_id=call_id,
node_id=node_id,
system_id=system_id,
talkgroup_id=talkgroup_id,
talkgroup_name=talkgroup_name,
gcs_uri=gcs_uri,
)
return {"url": audio_url}
def _public_url_to_gcs_uri(url: str) -> Optional[str]:
"""
Convert a public GCS URL like
https://storage.googleapis.com/bucket/calls/file.mp3
to a gs:// URI usable by Speech-to-Text.
Returns None if the URL doesn't look like a GCS URL.
"""
prefix = "https://storage.googleapis.com/"
if url and url.startswith(prefix):
return "gs://" + url[len(prefix):]
return None
async def _run_intelligence_pipeline(
    call_id: str,
    node_id: str,
    system_id: Optional[str],
    talkgroup_id: Optional[int],
    talkgroup_name: Optional[str],
    gcs_uri: Optional[str],
) -> None:
    """Post-upload intelligence pipeline, executed as a background task.

    Stages: (1) transcribe the audio via Google STT, (2) extract tags and
    an incident type from the transcript, (3) correlate the call with an
    incident, (4) evaluate alert rules. Stages 1-3 each skip when their
    input is missing; stage 4 always runs, because talkgroup-ID alert
    rules need no transcript.
    """
    # Imported lazily inside the task rather than at module top.
    # NOTE(review): presumably to avoid import cycles / keep the upload
    # endpoint's import cost down — confirm.
    from app.internal import transcription, intelligence, incident_correlator, alerter

    # Stage 1: speech-to-text, only when a gs:// URI is available.
    text: Optional[str] = None
    if gcs_uri:
        text = await transcription.transcribe_call(call_id, gcs_uri)

    # Stage 2: tag / incident-type extraction from the transcript.
    extracted_tags: list[str] = []
    detected_type: Optional[str] = None
    if text:
        extracted_tags, detected_type = await intelligence.extract_tags(call_id, text)

    # Stage 3: attach the call to an existing incident (or open a new one).
    if detected_type:
        await incident_correlator.correlate_call(
            call_id=call_id,
            node_id=node_id,
            system_id=system_id,
            talkgroup_name=talkgroup_name,
            tags=extracted_tags,
            incident_type=detected_type,
        )

    # Stage 4: alert evaluation runs unconditionally.
    await alerter.check_and_dispatch(
        call_id=call_id,
        node_id=node_id,
        talkgroup_id=talkgroup_id,
        talkgroup_name=talkgroup_name,
        tags=extracted_tags,
        transcript=text,
    )