Compare commits
7 Commits
19ac4b48be...implement-
| Author | SHA1 | Date |
|---|---|---|
| | 48beb79922 | |
| | 98727615a3 | |
| | 706f5a0e20 | |
| | 1be65c226f | |
| | 313da3684d | |
| | 80f5eb3f50 | |
| | 497cbccc80 | |
```diff
@@ -1,9 +1,5 @@
 NODE_ID=
 MQTT_BROKER=
 ICECAST_SERVER=
-AUDIO_BUCKET=
 NODE_LAT=
 NODE_LONG=
-HTTP_SERVER_PROTOCOL=
-HTTP_SERVER_ADDRESS=
-HTTP_SERVER_PORT=
```
.gitea/workflows/run-tests.yml — new file (37 lines added)

@@ -0,0 +1,37 @@
```yaml
name: Python Application Tests

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "*" ]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.13"]

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          # Install test dependencies
          pip install pytest pytest-asyncio httpx
          # Install application dependencies (assuming you have a requirements.txt)
          # If you don't have one, create it with `pip freeze > requirements.txt`
          # For now, we'll install the dependencies we know are needed from context
          pip install fastapi "uvicorn[standard]" paho-mqtt requests

      - name: Test with pytest
        run: |
          pytest
```
.gitignore (vendored) — 3 changed lines

```diff
@@ -2,5 +2,4 @@
 *.log
 *.db
 *.conf
 configs/*
-*.json
```
```diff
@@ -7,7 +7,7 @@ ENV DEBIAN_FRONTEND=noninteractive
 # Install system dependencies
 RUN apt-get update && \
     apt-get upgrade -y && \
-    apt-get install git pulseaudio pulseaudio-utils liquidsoap ffmpeg -y
+    apt-get install git pulseaudio pulseaudio-utils liquidsoap -y
 
 # Clone the boatbod op25 repository
 RUN git clone -b gr310 https://github.com/boatbod/op25 /op25
@@ -34,9 +34,6 @@ EXPOSE 8001 8081
 # Create and set up the configuration directory
 VOLUME ["/configs"]
 
-# Create the calls local cache directory
-VOLUME ["/calls"]
-
 # Set the working directory in the container
 WORKDIR /app
 
```
@@ -1,222 +0,0 @@ (file deleted)
```python
import discord
import asyncio
import socket
import logging
import threading
import struct
import subprocess
import shlex
from collections import deque
from typing import Optional, List, Tuple

LOGGER = logging.getLogger("discord_radio")

class UDPAudioSource(discord.AudioSource):
    """
    A custom Discord AudioSource that reads raw PCM audio from a thread-safe buffer.
    This buffer is filled by the UDPListener running in the background.

    It pipes the raw 8000Hz 16-bit audio into FFmpeg to convert it to
    Discord's required 48000Hz stereo Opus format.
    """
    def __init__(self, buffer: deque):
        self.buffer = buffer
        # We start an FFmpeg process that reads from stdin (pipe) and outputs raw PCM (s16le) at 48k
        # discord.py then encodes this to Opus.
        # Note: We use a pipe here because we are feeding it chunks from our UDP buffer.

        self.ffmpeg_process = subprocess.Popen(
            shlex.split(
                "ffmpeg -f s16le -ar 8000 -ac 1 -i pipe:0 -f s16le -ar 48000 -ac 2 pipe:1 -loglevel panic"
            ),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE
        )
        self._buffer_lock = threading.Lock()

    def read(self) -> bytes:
        """
        Reads 20ms of audio from the buffer, feeds it to ffmpeg,
        and returns the converted bytes to Discord.
        """
        # Discord requests 20ms of audio.
        # At 8000Hz 16-bit mono, 1 second = 16000 bytes. 20ms = 320 bytes.
        chunk_size = 320

        data = b''

        # Pull data from the UDP thread buffer
        with self._buffer_lock:
            if len(self.buffer) >= chunk_size:
                # Optimized deque slicing
                data = bytes([self.buffer.popleft() for _ in range(chunk_size)])
            else:
                # If we don't have enough data (buffer underrun), send silence to keep pipe open
                data = b'\x00' * chunk_size

        try:
            # Write raw 8k mono to ffmpeg stdin
            self.ffmpeg_process.stdin.write(data)
            self.ffmpeg_process.stdin.flush()

            # Read converted 48k stereo from ffmpeg stdout
            # 48000 Hz * 2 channels * 2 bytes (16bit) * 0.02s = 3840 bytes
            converted_data = self.ffmpeg_process.stdout.read(3840)
            return converted_data
        except Exception as e:
            LOGGER.error(f"Error in FFmpeg conversion: {e}")
            return b'\x00' * 3840

    def cleanup(self):
        if self.ffmpeg_process:
            self.ffmpeg_process.kill()

class DiscordRadioBot(discord.Client):
    """
    A dedicated Discord Client for streaming Radio traffic.

    Features:
    - Independent UDP Listener (non-blocking)
    - Packet Forwarding (to keep Liquidsoap/Icecast working)
    - Manual Channel Move detection
    - Dynamic Token/Channel joining
    """
    def __init__(self, listen_port=23456, forward_ports: List[int] = None):
        intents = discord.Intents.default()
        super().__init__(intents=intents)

        self.listen_port = listen_port
        self.forward_ports = forward_ports or []  # List of ports to forward UDP packets to (e.g. [23457])

        self.audio_buffer = deque(maxlen=160000)  # Buffer ~10 seconds of audio
        self.udp_sock = None
        self.running = False
        self.voice_client: Optional[discord.VoiceClient] = None

        # Start the UDP Listener in a separate daemon thread immediately
        self.udp_thread = threading.Thread(target=self._udp_listener_loop, daemon=True)
        self.udp_thread.start()

    def _udp_listener_loop(self):
        """
        Runs in a background thread. Listens for UDP packets from OP25.
        1. Adds data to internal audio buffer for Discord.
        2. Forwards data to other local ports (for Liquidsoap).
        """
        self.udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.udp_sock.bind(('127.0.0.1', self.listen_port))
        self.udp_sock.settimeout(1.0)

        LOGGER.info(f"UDP Audio Bridge listening on 127.0.0.1:{self.listen_port}")
        if self.forward_ports:
            LOGGER.info(f"Forwarding audio to: {self.forward_ports}")

        forward_socks = []
        for port in self.forward_ports:
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            forward_socks.append((s, port))

        self.running = True
        while self.running:
            try:
                data, addr = self.udp_sock.recvfrom(4096)

                # 1. Forward to Liquidsoap/Other tools
                for sock, port in forward_socks:
                    sock.sendto(data, ('127.0.0.1', port))

                # 2. Add to Discord Buffer
                # We extend the deque with the raw bytes
                self.audio_buffer.extend(data)

            except socket.timeout:
                continue
            except Exception as e:
                LOGGER.error(f"UDP Listener Error: {e}")
                # Prevent tight loop on socket fail
                asyncio.sleep(0.1)

    async def on_ready(self):
        LOGGER.info(f'Discord Radio Bot connected as {self.user}')

    async def on_voice_state_update(self, member, before, after):
        """
        Handles manual moves. If the bot is moved to a new channel by a user,
        update our internal voice_client reference to ensure we keep streaming.
        """
        if member.id == self.user.id:
            if after.channel is not None and before.channel != after.channel:
                LOGGER.info(f"Bot was moved to channel: {after.channel.name}")
                # discord.py handles the connection handoff automatically,
                # but we log it to confirm persistence.
                self.voice_client = member.guild.voice_client

    async def start_session(self, token: str, channel_id: int):
        """
        Connects the bot to Discord and joins the specified channel.
        This should be called when the 'start' command is received from C2.
        """
        # Start the client in the background
        asyncio.create_task(self.start(token))

        # Wait for login to complete (simple poll)
        for _ in range(20):
            if self.is_ready():
                break
            await asyncio.sleep(0.5)

        # Join Channel
        try:
            channel = self.get_channel(int(channel_id))
            if not channel:
                # If cache not ready, try fetching
                channel = await self.fetch_channel(int(channel_id))

            if channel:
                self.voice_client = await channel.connect()
                LOGGER.info(f"Joined voice channel: {channel.name}")
            else:
                LOGGER.error(f"Could not find channel ID: {channel_id}")
        except Exception as e:
            LOGGER.error(f"Failed to join voice: {e}")

    async def stop_session(self):
        """
        Disconnects voice and closes the bot connection.
        """
        if self.voice_client:
            await self.voice_client.disconnect()
        await self.close()

    def update_system_presence(self, system_name: str):
        """
        Updates the "Playing..." status of the bot.
        """
        activity = discord.Activity(
            type=discord.ActivityType.listening,
            name=f"{system_name}"
        )
        asyncio.create_task(self.change_presence(activity=activity))

    def start_transmission(self):
        """
        Called when a radio call STARTS.
        Connects the audio buffer to the voice stream.
        This turns the green ring ON.
        """
        if self.voice_client and not self.voice_client.is_playing():
            # clear buffer slightly to ensure we aren't playing old data
            # but keep a little to prevent jitter
            # self.audio_buffer.clear()

            source = UDPAudioSource(self.audio_buffer)
            self.voice_client.play(source)

    def stop_transmission(self):
        """
        Called when a radio call ENDS.
        Stops the stream.
        This turns the green ring OFF.
        """
        if self.voice_client and self.voice_client.is_playing():
            self.voice_client.stop()
```
```diff
@@ -153,6 +153,6 @@ def get_current_system_from_config() -> str:
     if not data:
         return None
     try:
-        return data.get("trunking", {}).get("chans", [{}])[0].get("sysname", "Unknown System")
+        return data.get("trunking", {}).get("sysname", "Unknown System")
     except:
         return "Unknown System"
```
```diff
@@ -38,7 +38,7 @@ class ChannelConfig(BaseModel):
     cqpsk_tracking: Optional[bool] = None
     frequency: Optional[float] = None
     nbfmSquelch: Optional[float] = None
-    destination: Optional[str] = "udp://127.0.0.1:23457"
+    destination: Optional[str] = "udp://127.0.0.1:23456"
     tracking_threshold: Optional[int] = 120
     tracking_feedback: Optional[float] = 0.75
     excess_bw: Optional[float] = 0.2
```
app/node_main.py — 159 changed lines

```diff
@@ -9,7 +9,6 @@ from internal.logger import create_logger
 from internal.op25_config_utls import scan_local_library
 import paho.mqtt.client as mqtt
 import requests
-from internal.discord_radio import DiscordRadioBot
 
 # Initialize logging
 LOGGER = create_logger(__name__)
@@ -21,18 +20,12 @@ app.include_router(create_op25_router(), prefix="/op25")
 # Configuration
 NODE_ID = os.getenv("NODE_ID", "standalone-node")
 MQTT_BROKER = os.getenv("MQTT_BROKER", None)
-HTTP_SERVER_PROTOCOL = os.getenv("HTTP_SERVER_PROTOCOL", "http")
-HTTP_SERVER_ADDRESS = os.getenv("HTTP_SERVER_ADDRESS", "127.0.0.1")
-HTTP_SERVER_PORT = os.getenv("HTTP_SERVER_PORT", 8000)
 NODE_LAT = os.getenv("NODE_LAT")
 NODE_LONG = os.getenv("NODE_LONG")
 
 # Global flag to track MQTT connection state
 MQTT_CONNECTED = False
 
-# Initialize the Discord Bot
-discord_bot = DiscordRadioBot(listen_port=23457, forward_ports=[23456])
-
 def handle_c2_command(topic, payload):
     """
     Parses and routes commands received from the C2 server by calling the
@@ -109,17 +102,6 @@ def handle_c2_command(topic, payload):
         except requests.exceptions.RequestException as e:
             LOGGER.error(f"Failed to connect to OP25 terminal for tuning: {e}")
 
-    elif command_type == "discord_join":
-        token = data.get("token")
-        channel_id = data.get("channel_id")
-        if token and channel_id:
-            asyncio.create_task(discord_bot.start_session(token, channel_id))
-            LOGGER.info("Initiating Discord Session...")
-
-    elif command_type == "discord_leave":
-        asyncio.create_task(discord_bot.stop_session())
-        LOGGER.info("Ending Discord Session...")
-
     else:
         LOGGER.warning(f"Unknown command type received: {command_type}")
 
@@ -130,37 +112,6 @@ def handle_c2_command(topic, payload):
     except Exception as e:
         LOGGER.error(f"Error processing C2 command: {e}")
 
-def get_current_stream_url():
-    """
-    Dynamically resolves the audio stream URL from the active OP25 configuration.
-    Falls back to env var or default if config is missing/invalid.
-    """
-    default_url = os.getenv("STREAM_URL", "http://127.0.0.1:8000/stream_0")
-    config_path = "/configs/active.cfg.json"
-
-    if not os.path.exists(config_path):
-        return default_url
-
-    try:
-        with open(config_path, "r") as f:
-            config = json.load(f)
-
-        streams = config.get("metadata", {}).get("streams", [])
-        if not streams:
-            return default_url
-
-        stream = streams[0]
-        address = stream.get("icecastServerAddress", "127.0.0.1:8000")
-        mount = stream.get("icecastMountpoint", "stream_0")
-
-        if not mount.startswith("/"):
-            mount = f"/{mount}"
-
-        return f"http://{address}{mount}"
-    except Exception as e:
-        LOGGER.warning(f"Failed to resolve stream URL from config: {e}")
-        return default_url
-
 async def mqtt_lifecycle_manager():
     """
     Manages the application-level logic: Check-in, Heartbeats, and Shutdown.
@@ -202,7 +153,7 @@ async def mqtt_lifecycle_manager():
             payload = {
                 "node_id": NODE_ID,
                 "status": "online",
-                "timestamp": datetime.utcnow().isoformat(),
+                "timestamp": datetime.now().isoformat(),
                 "is_listening": op25_status.get("is_running", False),
                 "active_system": op25_status.get("active_system"),
                 # Only scan library if needed, otherwise it's heavy I/O
@@ -246,39 +197,6 @@ async def mqtt_lifecycle_manager():
     # This is the specific payload the OP25 web interface uses [cite: 45562, 45563]
     COMMAND_PAYLOAD = [{"command": "update", "arg1": 0, "arg2": 0}]
 
-    # Audio Recording State
-    recorder_proc = None
-    current_call_id = None
-
-    async def stop_recording():
-        nonlocal recorder_proc
-        if recorder_proc:
-            if recorder_proc.returncode is None:
-                recorder_proc.terminate()
-                try:
-                    await asyncio.wait_for(recorder_proc.wait(), timeout=2.0)
-                except asyncio.TimeoutError:
-                    recorder_proc.kill()
-            recorder_proc = None
-
-    def upload_audio(call_id):
-        if not MQTT_BROKER: return None
-        local_path = f"/calls/{call_id}.mp3"
-        if not os.path.exists(local_path): return None
-
-        try:
-            with open(local_path, "rb") as f:
-                files = {"file": (f"{call_id}.mp3", f, "audio/mpeg")}
-                response = requests.post(f"{HTTP_SERVER_PROTOCOL}://{HTTP_SERVER_ADDRESS}:{HTTP_SERVER_PORT}/upload", files=files, data={"node_id": NODE_ID, "call_id": call_id}, timeout=30)
-                response.raise_for_status()
-                return response.json().get("url")
-        except Exception as e:
-            LOGGER.error(f"Upload failed: {e}")
-            return None
-        finally:
-            if os.path.exists(local_path):
-                os.remove(local_path)
-
     while True:
         if not MQTT_CONNECTED:
             await asyncio.sleep(1)
@@ -294,7 +212,7 @@ async def mqtt_lifecycle_manager():
 
             if response.status_code == 200:
                 data = response.json()
-                # LOGGER.debug(f"Response from OP25 API: {data}")
+                LOGGER.debug(f"Response from OP25 API: {data}")
 
                 current_tgid = 0
                 current_meta = {}
@@ -322,7 +240,7 @@ async def mqtt_lifecycle_manager():
                         break
                 if current_tgid: break
 
-                now = datetime.utcnow()
+                now = datetime.now()
 
                 # Logic for handling call start/end events
                 if current_tgid != 0:
@@ -330,59 +248,12 @@ async def mqtt_lifecycle_manager():
 
                     if current_tgid != last_tgid:
                         if last_tgid != 0:
-                            # --- END PREVIOUS CALL ---
-                            await stop_recording()
-
-                            # Stop Discord Transmission
-                            discord_bot.stop_transmission()
-
-                            audio_url = None
-                            if current_call_id:
-                                audio_url = await loop.run_in_executor(None, upload_audio, current_call_id)
-
                             LOGGER.debug(f"Switching TGID: {last_tgid} -> {current_tgid}")
-                            payload = {
-                                "node_id": NODE_ID,
-                                "timestamp": now.isoformat(),
-                                "event": "call_end",
-                                "metadata": last_metadata,
-                                "audio_url": audio_url,
-                                "call_id": current_call_id
-                            }
+                            payload = {"node_id": NODE_ID, "timestamp": now.isoformat(), "event": "call_end", "metadata": last_metadata}
                             client.publish(f"nodes/{NODE_ID}/metadata", json.dumps(payload), qos=0)
 
-                        # --- START NEW CALL ---
                         LOGGER.debug(f"Call Start: TGID {current_tgid} ({current_meta.get('alpha_tag')})")
-                        # Trigger Discord Transmission
-                        discord_bot.start_transmission()
-                        discord_bot.update_system_presence(current_meta.get('sysname', 'Scanning'))
-
-                        # Generate ID
-                        start_ts = int(now.timestamp())
-                        sysname = current_meta.get('sysname', 'unknown')
-                        tgid = current_meta.get('tgid', '0')
-                        current_call_id = f"{NODE_ID}_{sysname}_{tgid}_{start_ts}"
-
-                        # Start Recording (FFmpeg)
-                        try:
-                            stream_url = get_current_stream_url()
-                            recorder_proc = await asyncio.create_subprocess_exec(
-                                "ffmpeg", "-i", stream_url, "-y", "-t", "300",
-                                f"/calls/{current_call_id}.mp3",
-                                stdout=asyncio.subprocess.DEVNULL,
-                                stderr=asyncio.subprocess.DEVNULL
-                            )
-                        except Exception as e:
-                            LOGGER.error(f"Failed to start recorder: {e}")
-
-                        payload = {
-                            "node_id": NODE_ID,
-                            "timestamp": now.isoformat(),
-                            "event": "call_start",
-                            "metadata": current_meta,
-                            "call_id": current_call_id
-                        }
+                        payload = {"node_id": NODE_ID, "timestamp": now.isoformat(), "event": "call_start", "metadata": current_meta}
                         client.publish(f"nodes/{NODE_ID}/metadata", json.dumps(payload), qos=0)
                         last_tgid = current_tgid
                         last_metadata = current_meta
@@ -392,30 +263,12 @@ async def mqtt_lifecycle_manager():
                         LOGGER.debug(f"Signal lost for TGID {last_tgid}. Starting debounce.")
                         potential_end_time = now
                     elif (now - potential_end_time).total_seconds() > DEBOUNCE_SECONDS:
-                        # --- END CALL (Debounce Expired) ---
-                        await stop_recording()
-
-                        # Stop Discord Transmission
-                        discord_bot.stop_transmission()
-
-                        audio_url = None
-                        if current_call_id:
-                            audio_url = await loop.run_in_executor(None, upload_audio, current_call_id)
-
                         LOGGER.debug(f"Call End (Debounce expired): TGID {last_tgid}")
-                        payload = {
-                            "node_id": NODE_ID,
-                            "timestamp": now.isoformat(),
-                            "event": "call_end",
-                            "metadata": last_metadata,
-                            "audio_url": audio_url,
-                            "call_id": current_call_id
-                        }
+                        payload = {"node_id": NODE_ID, "timestamp": now.isoformat(), "event": "call_end", "metadata": last_metadata}
                         client.publish(f"nodes/{NODE_ID}/metadata", json.dumps(payload), qos=0)
                         last_tgid = 0
                         last_metadata = {}
                         potential_end_time = None
-                        current_call_id = None
             else:
                 LOGGER.debug(f"OP25 API returned status: {response.status_code}")
```
```diff
@@ -21,10 +21,6 @@ services:
       - NODE_LONG=${NODE_LONG}
       - MQTT_BROKER=${MQTT_BROKER}
      - ICECAST_SERVER=${ICECAST_SERVER}
-      - AUDIO_BUCKET=${AUDIO_BUCKET}
-      - HTTP_SERVER_PROTOCOL=${HTTP_SERVER_PROTOCOL}
-      - HTTP_SERVER_ADDRESS=${HTTP_SERVER_ADDRESS}
-      - HTTP_SERVER_PORT=${HTTP_SERVER_PORT}
     networks:
       - radio-shared-net
 
```
tests/test_op25_controller.py — new file (316 lines added)

@@ -0,0 +1,316 @@
```python
import pytest
from fastapi.testclient import TestClient
from unittest.mock import patch, MagicMock, mock_open, ANY
import json
import os
import types
from typing import List, Optional
from pydantic import BaseModel

# The router is included in the main app, so we test through it.
# We need to adjust the python path for imports to work correctly
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'app')))
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

# --- MOCK MODELS ---
# The actual models.models file has a NameError (IcecastConfig used before definition).
# Since we cannot edit the source code, we mock the module here to allow tests to run.
mock_models = types.ModuleType("models.models")

class MockTerminalConfig(BaseModel):
    pass

class MockTalkgroupTag(BaseModel):
    tagDec: int
    tagName: str

class MockDecodeMode:
    P25 = "P25"
    ANALOG = "ANALOG"

class MockIcecastConfig(BaseModel):
    icecast_host: str
    icecast_port: int
    icecast_mountpoint: str
    icecast_password: str

class MockAnalogConfig(BaseModel):
    systemName: str
    frequency: str
    nbfmSquelch: int

class MockConfigGenerator(BaseModel):
    type: str
    systemName: str
    channels: Optional[List[str]] = None
    tags: Optional[List[MockTalkgroupTag]] = None
    whitelist: Optional[str] = None
    icecastConfig: Optional[MockIcecastConfig] = None
    config: Optional[MockAnalogConfig] = None

class MockChannelConfig(BaseModel):
    name: Optional[str] = None
    trunking_sysname: Optional[str] = None
    enable_analog: Optional[str] = None
    demod_type: Optional[str] = None
    cqpsk_tracking: Optional[bool] = None
    filter_type: Optional[str] = None
    meta_stream_name: Optional[str] = None
    channelName: Optional[str] = None
    enableAnalog: Optional[str] = None
    demodType: Optional[str] = None
    frequency: Optional[str] = None
    filterType: Optional[str] = None
    nbfmSquelch: Optional[int] = None

class MockDeviceConfig(BaseModel):
    gain: Optional[str] = None

class MockTrunkingChannelConfig(BaseModel):
    sysname: str
    control_channel_list: str
    tagsFile: str
    whitelist: str

class MockTrunkingConfig(BaseModel):
    module: str
    chans: List[MockTrunkingChannelConfig]

class MockMetadataStreamConfig(BaseModel):
    stream_name: str
    icecastServerAddress: str
    icecastMountpoint: str
    icecastPass: str

class MockMetadataConfig(BaseModel):
    streams: List[MockMetadataStreamConfig]


mock_models.ConfigGenerator = MockConfigGenerator
mock_models.DecodeMode = MockDecodeMode
mock_models.ChannelConfig = MockChannelConfig
mock_models.DeviceConfig = MockDeviceConfig
mock_models.TrunkingConfig = MockTrunkingConfig
mock_models.TrunkingChannelConfig = MockTrunkingChannelConfig
mock_models.TerminalConfig = MockTerminalConfig
mock_models.MetadataConfig = MockMetadataConfig
mock_models.MetadataStreamConfig = MockMetadataStreamConfig
mock_models.IcecastConfig = MockIcecastConfig
mock_models.TalkgroupTag = MockTalkgroupTag

sys.modules["models.models"] = mock_models
sys.modules["models"] = types.ModuleType("models")
sys.modules["models"].models = mock_models
# -------------------

from app.node_main import app

# Use a client to make requests to the app
client = TestClient(app)

# Define a sample P25 config payload for testing
SAMPLE_P25_CONFIG = {
    "type": "P25",
    "systemName": "TestSystem",
    "channels": ["851.12345", "852.67890"],
    "tags": [{"tagDec": 101, "tagName": "Group A"}, {"tagDec": 102, "tagName": "Group B"}],
    "whitelist": "101",
    "icecastConfig": {
        "icecast_host": "localhost",
        "icecast_port": 8000,
        "icecast_mountpoint": "test",
        "icecast_password": "hackme"
    }
}

@pytest.fixture(autouse=True)
def reset_and_mock_globals(monkeypatch):
    """
    Fixture to reset the global op25_process state and mock dependencies
    before each test, ensuring test isolation.
    """
    # Reset the global process variable in the controller module
    monkeypatch.setattr("routers.op25_controller.op25_process", None)

    # Mock asyncio.sleep to prevent tests from actually waiting
    mock_sleep = MagicMock()
    monkeypatch.setattr("asyncio.sleep", mock_sleep)

    # Mock os functions related to process groups
    monkeypatch.setattr("os.killpg", MagicMock())
    monkeypatch.setattr("os.getpgid", MagicMock(return_value=12345))


@patch("routers.op25_controller.subprocess.Popen")
def test_start_op25_success(mock_popen):
    """Test the /start endpoint successfully starts the process."""
    mock_process = MagicMock()
    mock_process.pid = 12345
    mock_popen.return_value = mock_process

    response = client.post("/op25/start")
    assert response.status_code == 200
    assert response.json() == {"status": "OP25 started"}
    mock_popen.assert_called_once()


@patch("routers.op25_controller.subprocess.Popen", side_effect=Exception("Popen failed"))
def test_start_op25_failure(mock_popen):
    """Test the /start endpoint when Popen raises an exception."""
    response = client.post("/op25/start")
    assert response.status_code == 500
    assert "Failed to start OP25" in response.json()["detail"]


def test_stop_op25_not_running():
    """Test the /stop endpoint when the process is not running."""
    response = client.post("/op25/stop")
    assert response.status_code == 200
    assert response.json() == {"status": "OP25 was not running"}


@patch("routers.op25_controller.subprocess.Popen")
def test_stop_op25_success(mock_popen, monkeypatch):
    """Test the /stop endpoint successfully stops a running process."""
    mock_process = MagicMock()
    mock_process.pid = 12345
    mock_process.poll.return_value = None  # Indicates it's running
    monkeypatch.setattr("routers.op25_controller.op25_process", mock_process)

    response = client.post("/op25/stop")
    assert response.status_code == 200
    assert response.json() == {"status": "OP25 stopped"}
    os.killpg.assert_called_with(os.getpgid(mock_process.pid), ANY)


def test_get_status_not_running():
    """Test the /status endpoint when the process is not running."""
    response = client.get("/op25/status")
    assert response.status_code == 200
    data = response.json()
    assert data["is_running"] is False
    assert data["pid"] is None
    assert data["active_system"] is None


@patch("routers.op25_controller.get_current_system_from_config", return_value="TestSystem")
@patch("routers.op25_controller.subprocess.Popen")
def test_get_status_running(mock_popen, mock_get_system, monkeypatch):
    """Test the /status endpoint when the process is running."""
    mock_process = MagicMock()
    mock_process.pid = 12345
    mock_process.poll.return_value = None  # Running
    monkeypatch.setattr("routers.op25_controller.op25_process", mock_process)

    response = client.get("/op25/status")
    assert response.status_code == 200
    data = response.json()
    assert data["is_running"] is True
    assert data["pid"] == 12345
    assert data["active_system"] == "TestSystem"
    mock_get_system.assert_called_once()


@patch("builtins.open", new_callable=mock_open)
@patch("routers.op25_controller.json.dump")
@patch("routers.op25_controller.save_talkgroup_tags")
@patch("routers.op25_controller.save_whitelist")
@patch("routers.op25_controller.generate_liquid_script")
@patch("routers.op25_controller.subprocess.Popen")
def test_set_active_config_no_restart(mock_popen, mock_liquid, mock_white, mock_tags, mock_dump, mock_file):
    """Test setting active config without restarting the radio."""
    response = client.post("/op25/set_active_config?restart=false", json=SAMPLE_P25_CONFIG)

    assert response.status_code == 200
    assert response.json() == {"message": "Active configuration updated", "radio_restarted": False}

    # Verify config files were written
    mock_file.assert_called_with('/configs/active.cfg.json', 'w')
    mock_dump.assert_called_once()
    mock_tags.assert_called_with([MockTalkgroupTag(**t) for t in SAMPLE_P25_CONFIG["tags"]])
    mock_white.assert_called_with(SAMPLE_P25_CONFIG["whitelist"])
    mock_liquid.assert_called_with(MockIcecastConfig(**SAMPLE_P25_CONFIG["icecastConfig"]))

    # Verify radio was NOT started/stopped
    mock_popen.assert_not_called()
    os.killpg.assert_not_called()


@patch("routers.op25_controller.activate_config_from_library", return_value=True)
@patch("routers.op25_controller.save_config_to_library")
@patch("routers.op25_controller.save_library_sidecars")
@patch("routers.op25_controller.subprocess.Popen")
def test_set_active_config_with_save_to_library(mock_popen, mock_save_sidecars, mock_save_lib, mock_activate):
    """Test setting active config and saving it to the library."""
    library_name = "MyNewSystem"
    response = client.post(
        f"/op25/set_active_config?restart=true&save_to_library_name={library_name}",
        json=SAMPLE_P25_CONFIG
    )

    assert response.status_code == 200
    assert response.json()["radio_restarted"] is True

    # Verify it was saved and then activated from the library
    mock_save_lib.assert_called_with(library_name, ANY)
    mock_save_sidecars.assert_called_with(library_name, ANY)
    mock_activate.assert_called_with(library_name)

    # Verify radio was restarted
    assert mock_popen.call_count == 1


@patch("routers.op25_controller.activate_config_from_library", return_value=True)
@patch("routers.op25_controller.subprocess.Popen")
def test_load_from_library_success(mock_popen, mock_activate):
    """Test loading a configuration from the library."""
    system_name = "ExistingSystem"
    response = client.post(f"/op25/load_from_library?system_name={system_name}")

    assert response.status_code == 200
    assert response.json() == {"status": f"Loaded and started library config: {system_name}"}

    # Verify activation and restart
    mock_activate.assert_called_with(system_name)
    assert mock_popen.call_count == 1


@patch("routers.op25_controller.activate_config_from_library", return_value=False)
def test_load_from_library_not_found(mock_activate):
    """Test loading a non-existent configuration from the library."""
    system_name = "NotFoundSystem"
    response = client.post(f"/op25/load_from_library?system_name={system_name}")

    assert response.status_code == 404
    assert "not found in library" in response.json()["detail"]


@patch("routers.op25_controller.save_config_to_library", return_value=True)
@patch("routers.op25_controller.save_library_sidecars")
def test_save_to_library(mock_save_sidecars, mock_save_lib):
    """Test saving a configuration directly to the library."""
    system_name = "NewLibSystem"
    response = client.post(f"/op25/save_to_library?system_name={system_name}", json=SAMPLE_P25_CONFIG)

    assert response.status_code == 200
    assert response.json() == {"status": f"Config saved as {system_name}"}
    mock_save_lib.assert_called_with(system_name, ANY)
    mock_save_sidecars.assert_called_with(system_name, ANY)


@patch("routers.op25_controller.scan_local_library", return_value=["System1.json", "System2.json"])
def test_get_library(mock_scan):
    """Test the /library endpoint."""
    response = client.get("/op25/library")
    assert response.status_code == 200
    assert response.json() == ["System1.json", "System2.json"]
    mock_scan.assert_called_once()


@patch("routers.op25_controller.build_op25_config", side_effect=Exception("Build failed"))
def test_set_active_config_build_failure(mock_build):
    """Test error handling when config building fails."""
    response = client.post("/op25/set_active_config", json=SAMPLE_P25_CONFIG)
    assert response.status_code == 500
    assert "Configuration error: Build failed" in response.json()["detail"]
```