12 Commits

Author SHA1 Message Date
Logan Cusano
48beb79922 Placement error, actual last attempt
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 8s
2025-12-29 20:11:06 -05:00
Logan Cusano
98727615a3 Last fix attempt
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 8s
2025-12-29 20:09:48 -05:00
Logan Cusano
706f5a0e20 last attempt
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 8s
2025-12-29 19:58:16 -05:00
Logan Cusano
1be65c226f Update test with mock models
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 8s
2025-12-29 19:51:56 -05:00
Logan Cusano
313da3684d undo mistake
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 7s
2025-12-29 19:46:59 -05:00
Logan Cusano
80f5eb3f50 Fix test path
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 8s
2025-12-29 19:43:11 -05:00
Logan Cusano
497cbccc80 init testing
Some checks failed
Python Application Tests / build (3.13) (pull_request) Failing after 53s
2025-12-29 19:18:13 -05:00
de143a67fe Merge pull request 'Implement Metadata Watcher' (#1) from metadata-watcher into main
All checks were successful
release-tag / release-image (push) Successful in 1h26m24s
Reviewed-on: #1
2025-12-29 19:04:07 -05:00
Logan Cusano
ee9ce0e140 Add the radio ID to the metadata payload to track who is talking, not just what system 2025-12-29 19:02:51 -05:00
Logan Cusano
ca984be293 Implement debug logging into metadata watcher 2025-12-29 15:48:45 -05:00
Logan Cusano
b8ee991192 Update port in docker compose and update metadata watcher function to use correct OP25 endpoint 2025-12-29 15:23:18 -05:00
Logan Cusano
0a6b565651 Fix bug in op25 config where it would not create the Liquidsoap script if a saved config was loaded 2025-12-29 15:06:48 -05:00
5 changed files with 421 additions and 33 deletions

View File

@@ -0,0 +1,37 @@
name: Python Application Tests
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "*" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.13"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
# Install test dependencies
pip install pytest pytest-asyncio httpx
# Install application dependencies (assuming you have a requirements.txt)
# If you don't have one, create it with `pip freeze > requirements.txt`
# For now, we'll install the dependencies we know are needed from context
pip install fastapi "uvicorn[standard]" paho-mqtt requests
- name: Test with pytest
run: |
pytest
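
The workflow above installs pytest, pytest-asyncio and httpx and then simply runs `pytest`, so any test module under the repository root is collected. As a point of reference, a minimal self-contained test in the same stack could look like the sketch below; the file name and the `/health` route are hypothetical and not part of this PR.

# test_smoke.py -- hypothetical standalone example, not part of this PR.
# It exercises the same stack the workflow installs (fastapi + httpx via
# TestClient) so `pytest` has at least one test to collect.
from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()


@app.get("/health")
def health():
    # Trivial endpoint used only to prove the test harness works.
    return {"status": "ok"}


def test_health():
    client = TestClient(app)
    response = client.get("/health")
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}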

View File

@@ -3,9 +3,10 @@ import json
import os
import shutil
from pathlib import Path
from models.models import TalkgroupTag
from models.models import TalkgroupTag, IcecastConfig
from typing import List, Dict
from internal.logger import create_logger
from internal.liquidsoap_config_utils import generate_liquid_script
LOGGER = create_logger(__name__)
@@ -66,6 +67,27 @@ def activate_config_from_library(system_name: str) -> bool:
if src_whitelist.exists():
shutil.copy2(src_whitelist, config_path / "active.cfg.whitelist.tsv")
# Generate Liquidsoap Script by reading the activated config
with open(dst, 'r') as f:
data = json.load(f)
if "trunking" in data and "metadata" in data:
streams = data.get("metadata", {}).get("streams", [])
if streams:
stream = streams[0]
address = stream.get("icecastServerAddress", "127.0.0.1:8000")
host, port = address.split(":") if ":" in address else (address, 8000)
ice_config = IcecastConfig(
icecast_host=host,
icecast_port=int(port),
icecast_mountpoint=stream.get("icecastMountpoint", "/stream"),
icecast_password=stream.get("icecastPass", "hackme"),
icecast_description="OP25 Stream",
icecast_genre="Scanner"
)
generate_liquid_script(ice_config)
return True
except Exception as e:
LOGGER.error(f"Failed to copy config: {e}")

View File

@@ -186,12 +186,16 @@ async def mqtt_lifecycle_manager():
async def metadata_watcher():
"""
Polls OP25 HTTP terminal for metadata and publishes events to MQTT.
Corrected to use the POST-based command API found in the HAR capture.
"""
last_tgid = 0
last_metadata = {}
potential_end_time = None
DEBOUNCE_SECONDS = 2.5
OP25_DATA_URL = "http://127.0.0.1:8081/data.json"
OP25_DATA_URL = "http://127.0.0.1:8081/"
# This is the specific payload the OP25 web interface sends (observed in the HAR capture)
COMMAND_PAYLOAD = [{"command": "update", "arg1": 0, "arg2": 0}]
while True:
if not MQTT_CONNECTED:
@@ -199,52 +203,56 @@ async def mqtt_lifecycle_manager():
continue
try:
# Run blocking request in executor to avoid blocking the asyncio loop
# Run blocking POST request in executor
loop = asyncio.get_running_loop()
response = await loop.run_in_executor(None, lambda: requests.get(OP25_DATA_URL, timeout=0.5))
response = await loop.run_in_executor(
None,
lambda: requests.post(OP25_DATA_URL, json=COMMAND_PAYLOAD, timeout=0.5)
)
if response.status_code == 200:
data = response.json()
LOGGER.debug(f"Response from OP25 API: {data}")
current_tgid = 0
current_meta = {}
# Handle multi_rx list or single dict structure
if isinstance(data, list):
for ch in data:
t = ch.get("tgid", 0)
# The response is an array of update objects
for item in data:
if item.get("json_type") == "channel_update":
# The terminal provides channel info keyed by channel index (e.g., "0")
# We look for the first channel that has an active TGID
for key in item:
if key.isdigit():
ch = item[key]
t = ch.get("tgid")
# OP25 returns null or 0 when no talkgroup is active
if t and int(t) > 0:
current_tgid = int(t)
current_meta = {
"tgid": str(t),
"rid": str(ch.get("srcaddr", "")).strip(),
"alpha_tag": str(ch.get("tag", "")).strip(),
"frequency": str(ch.get("freq", 0)),
"sysname": str(ch.get("system", "")).strip()
}
break
elif isinstance(data, dict):
t = data.get("tgid", 0)
if t and int(t) > 0:
current_tgid = int(t)
current_meta = {
"tgid": str(t),
"alpha_tag": str(data.get("tag", "")).strip(),
"frequency": str(data.get("freq", 0)),
"sysname": str(data.get("system", "")).strip()
}
if current_tgid: break
now = datetime.now()
# Logic for handling call start/end events
if current_tgid != 0:
potential_end_time = None # Reset debounce
potential_end_time = None
if current_tgid != last_tgid:
if last_tgid != 0:
# End previous call immediately if switching channels
LOGGER.debug(f"Switching TGID: {last_tgid} -> {current_tgid}")
payload = {"node_id": NODE_ID, "timestamp": now.isoformat(), "event": "call_end", "metadata": last_metadata}
client.publish(f"nodes/{NODE_ID}/metadata", json.dumps(payload), qos=0)
# Start new call
LOGGER.debug(f"Call Start: TGID {current_tgid} ({current_meta.get('alpha_tag')})")
payload = {"node_id": NODE_ID, "timestamp": now.isoformat(), "event": "call_start", "metadata": current_meta}
client.publish(f"nodes/{NODE_ID}/metadata", json.dumps(payload), qos=0)
last_tgid = current_tgid
@@ -252,16 +260,20 @@ async def mqtt_lifecycle_manager():
elif last_tgid != 0:
if potential_end_time is None:
LOGGER.debug(f"Signal lost for TGID {last_tgid}. Starting debounce.")
potential_end_time = now
elif (now - potential_end_time).total_seconds() > DEBOUNCE_SECONDS:
LOGGER.debug(f"Call End (Debounce expired): TGID {last_tgid}")
payload = {"node_id": NODE_ID, "timestamp": now.isoformat(), "event": "call_end", "metadata": last_metadata}
client.publish(f"nodes/{NODE_ID}/metadata", json.dumps(payload), qos=0)
last_tgid = 0
last_metadata = {}
potential_end_time = None
else:
LOGGER.debug(f"OP25 API returned status: {response.status_code}")
except Exception:
pass # OP25 might be restarting or busy
except Exception as e:
LOGGER.warning(f"Metadata watcher error: {e}")
await asyncio.sleep(0.25)
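
The reworked watcher posts the `update` command, scans `channel_update` objects for digit-keyed channel entries, and applies a 2.5-second debounce before publishing `call_end`. The sketch below pulls that call start/end logic out into a small pure class so it can be exercised without OP25 or MQTT; the event names and debounce window come from the diff, everything else is illustrative.

# Sketch of the call start/end debounce logic from metadata_watcher, isolated
# into a pure class so it can be unit-tested without OP25 or MQTT.
from datetime import datetime, timedelta

DEBOUNCE_SECONDS = 2.5


class CallTracker:
    def __init__(self):
        self.last_tgid = 0
        self.potential_end_time = None

    def update(self, current_tgid: int, now: datetime) -> list[str]:
        """Return the events ("call_start"/"call_end") to publish for this poll."""
        events = []
        if current_tgid:
            self.potential_end_time = None
            if current_tgid != self.last_tgid:
                if self.last_tgid:
                    events.append("call_end")      # switching talkgroups ends the old call
                events.append("call_start")
                self.last_tgid = current_tgid
        elif self.last_tgid:
            if self.potential_end_time is None:
                self.potential_end_time = now      # start the debounce window
            elif (now - self.potential_end_time).total_seconds() > DEBOUNCE_SECONDS:
                events.append("call_end")          # signal stayed gone past the window
                self.last_tgid = 0
                self.potential_end_time = None
        return events


t0 = datetime(2025, 12, 29, 20, 0, 0)
tracker = CallTracker()
assert tracker.update(101, t0) == ["call_start"]
assert tracker.update(0, t0 + timedelta(seconds=1)) == []            # still inside debounce
assert tracker.update(0, t0 + timedelta(seconds=4)) == ["call_end"]  # window expired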

View File

@@ -7,6 +7,7 @@ services:
restart: unless-stopped
ports:
- 8001:8001
- 8081:8081
devices:
- "/dev/bus/usb:/dev/bus/usb"
volumes:
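
With 8081 now published alongside 8001, a quick reachability check of both ports can confirm the mapping took effect. The snippet below is a hypothetical helper, assuming the container's ports are reachable on 127.0.0.1.

# Hypothetical helper: verify the two published ports (8001 for the node API,
# 8081 for the OP25 web terminal) are accepting connections on localhost.
import socket

for port in (8001, 8081):
    try:
        with socket.create_connection(("127.0.0.1", port), timeout=1):
            print(f"port {port}: open")
    except OSError as exc:
        print(f"port {port}: unreachable ({exc})")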

View File

@@ -0,0 +1,316 @@
import pytest
from fastapi.testclient import TestClient
from unittest.mock import patch, MagicMock, mock_open, ANY
import json
import os
import types
from typing import List, Optional
from pydantic import BaseModel
# The router is included in the main app, so we test through it.
# We need to adjust the python path for imports to work correctly
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'app')))
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
# --- MOCK MODELS ---
# The actual models.models file has a NameError (IcecastConfig used before definition).
# Since we cannot edit the source code, we mock the module here to allow tests to run.
mock_models = types.ModuleType("models.models")
class MockTerminalConfig(BaseModel):
pass
class MockTalkgroupTag(BaseModel):
tagDec: int
tagName: str
class MockDecodeMode:
P25 = "P25"
ANALOG = "ANALOG"
class MockIcecastConfig(BaseModel):
icecast_host: str
icecast_port: int
icecast_mountpoint: str
icecast_password: str
class MockAnalogConfig(BaseModel):
systemName: str
frequency: str
nbfmSquelch: int
class MockConfigGenerator(BaseModel):
type: str
systemName: str
channels: Optional[List[str]] = None
tags: Optional[List[MockTalkgroupTag]] = None
whitelist: Optional[str] = None
icecastConfig: Optional[MockIcecastConfig] = None
config: Optional[MockAnalogConfig] = None
class MockChannelConfig(BaseModel):
name: Optional[str] = None
trunking_sysname: Optional[str] = None
enable_analog: Optional[str] = None
demod_type: Optional[str] = None
cqpsk_tracking: Optional[bool] = None
filter_type: Optional[str] = None
meta_stream_name: Optional[str] = None
channelName: Optional[str] = None
enableAnalog: Optional[str] = None
demodType: Optional[str] = None
frequency: Optional[str] = None
filterType: Optional[str] = None
nbfmSquelch: Optional[int] = None
class MockDeviceConfig(BaseModel):
gain: Optional[str] = None
class MockTrunkingChannelConfig(BaseModel):
sysname: str
control_channel_list: str
tagsFile: str
whitelist: str
class MockTrunkingConfig(BaseModel):
module: str
chans: List[MockTrunkingChannelConfig]
class MockMetadataStreamConfig(BaseModel):
stream_name: str
icecastServerAddress: str
icecastMountpoint: str
icecastPass: str
class MockMetadataConfig(BaseModel):
streams: List[MockMetadataStreamConfig]
mock_models.ConfigGenerator = MockConfigGenerator
mock_models.DecodeMode = MockDecodeMode
mock_models.ChannelConfig = MockChannelConfig
mock_models.DeviceConfig = MockDeviceConfig
mock_models.TrunkingConfig = MockTrunkingConfig
mock_models.TrunkingChannelConfig = MockTrunkingChannelConfig
mock_models.TerminalConfig = MockTerminalConfig
mock_models.MetadataConfig = MockMetadataConfig
mock_models.MetadataStreamConfig = MockMetadataStreamConfig
mock_models.IcecastConfig = MockIcecastConfig
mock_models.TalkgroupTag = MockTalkgroupTag
sys.modules["models.models"] = mock_models
sys.modules["models"] = types.ModuleType("models")
sys.modules["models"].models = mock_models
# -------------------
from app.node_main import app
# Use a client to make requests to the app
client = TestClient(app)
# Define a sample P25 config payload for testing
SAMPLE_P25_CONFIG = {
"type": "P25",
"systemName": "TestSystem",
"channels": ["851.12345", "852.67890"],
"tags": [{"tagDec": 101, "tagName": "Group A"}, {"tagDec": 102, "tagName": "Group B"}],
"whitelist": "101",
"icecastConfig": {
"icecast_host": "localhost",
"icecast_port": 8000,
"icecast_mountpoint": "test",
"icecast_password": "hackme"
}
}
@pytest.fixture(autouse=True)
def reset_and_mock_globals(monkeypatch):
"""
Fixture to reset the global op25_process state and mock dependencies
before each test, ensuring test isolation.
"""
# Reset the global process variable in the controller module
monkeypatch.setattr("routers.op25_controller.op25_process", None)
# Mock asyncio.sleep to prevent tests from actually waiting
mock_sleep = MagicMock()
monkeypatch.setattr("asyncio.sleep", mock_sleep)
# Mock os functions related to process groups
monkeypatch.setattr("os.killpg", MagicMock())
monkeypatch.setattr("os.getpgid", MagicMock(return_value=12345))
@patch("routers.op25_controller.subprocess.Popen")
def test_start_op25_success(mock_popen):
"""Test the /start endpoint successfully starts the process."""
mock_process = MagicMock()
mock_process.pid = 12345
mock_popen.return_value = mock_process
response = client.post("/op25/start")
assert response.status_code == 200
assert response.json() == {"status": "OP25 started"}
mock_popen.assert_called_once()
@patch("routers.op25_controller.subprocess.Popen", side_effect=Exception("Popen failed"))
def test_start_op25_failure(mock_popen):
"""Test the /start endpoint when Popen raises an exception."""
response = client.post("/op25/start")
assert response.status_code == 500
assert "Failed to start OP25" in response.json()["detail"]
def test_stop_op25_not_running():
"""Test the /stop endpoint when the process is not running."""
response = client.post("/op25/stop")
assert response.status_code == 200
assert response.json() == {"status": "OP25 was not running"}
@patch("routers.op25_controller.subprocess.Popen")
def test_stop_op25_success(mock_popen, monkeypatch):
"""Test the /stop endpoint successfully stops a running process."""
mock_process = MagicMock()
mock_process.pid = 12345
mock_process.poll.return_value = None # Indicates it's running
monkeypatch.setattr("routers.op25_controller.op25_process", mock_process)
response = client.post("/op25/stop")
assert response.status_code == 200
assert response.json() == {"status": "OP25 stopped"}
os.killpg.assert_called_with(os.getpgid(mock_process.pid), ANY)
def test_get_status_not_running():
"""Test the /status endpoint when the process is not running."""
response = client.get("/op25/status")
assert response.status_code == 200
data = response.json()
assert data["is_running"] is False
assert data["pid"] is None
assert data["active_system"] is None
@patch("routers.op25_controller.get_current_system_from_config", return_value="TestSystem")
@patch("routers.op25_controller.subprocess.Popen")
def test_get_status_running(mock_popen, mock_get_system, monkeypatch):
"""Test the /status endpoint when the process is running."""
mock_process = MagicMock()
mock_process.pid = 12345
mock_process.poll.return_value = None # Running
monkeypatch.setattr("routers.op25_controller.op25_process", mock_process)
response = client.get("/op25/status")
assert response.status_code == 200
data = response.json()
assert data["is_running"] is True
assert data["pid"] == 12345
assert data["active_system"] == "TestSystem"
mock_get_system.assert_called_once()
@patch("builtins.open", new_callable=mock_open)
@patch("routers.op25_controller.json.dump")
@patch("routers.op25_controller.save_talkgroup_tags")
@patch("routers.op25_controller.save_whitelist")
@patch("routers.op25_controller.generate_liquid_script")
@patch("routers.op25_controller.subprocess.Popen")
def test_set_active_config_no_restart(mock_popen, mock_liquid, mock_white, mock_tags, mock_dump, mock_file):
"""Test setting active config without restarting the radio."""
response = client.post("/op25/set_active_config?restart=false", json=SAMPLE_P25_CONFIG)
assert response.status_code == 200
assert response.json() == {"message": "Active configuration updated", "radio_restarted": False}
# Verify config files were written
mock_file.assert_called_with('/configs/active.cfg.json', 'w')
mock_dump.assert_called_once()
mock_tags.assert_called_with([MockTalkgroupTag(**t) for t in SAMPLE_P25_CONFIG["tags"]])
mock_white.assert_called_with(SAMPLE_P25_CONFIG["whitelist"])
mock_liquid.assert_called_with(MockIcecastConfig(**SAMPLE_P25_CONFIG["icecastConfig"]))
# Verify radio was NOT started/stopped
mock_popen.assert_not_called()
os.killpg.assert_not_called()
@patch("routers.op25_controller.activate_config_from_library", return_value=True)
@patch("routers.op25_controller.save_config_to_library")
@patch("routers.op25_controller.save_library_sidecars")
@patch("routers.op25_controller.subprocess.Popen")
def test_set_active_config_with_save_to_library(mock_popen, mock_save_sidecars, mock_save_lib, mock_activate):
"""Test setting active config and saving it to the library."""
library_name = "MyNewSystem"
response = client.post(
f"/op25/set_active_config?restart=true&save_to_library_name={library_name}",
json=SAMPLE_P25_CONFIG
)
assert response.status_code == 200
assert response.json()["radio_restarted"] is True
# Verify it was saved and then activated from the library
mock_save_lib.assert_called_with(library_name, ANY)
mock_save_sidecars.assert_called_with(library_name, ANY)
mock_activate.assert_called_with(library_name)
# Verify radio was restarted
assert mock_popen.call_count == 1
@patch("routers.op25_controller.activate_config_from_library", return_value=True)
@patch("routers.op25_controller.subprocess.Popen")
def test_load_from_library_success(mock_popen, mock_activate):
"""Test loading a configuration from the library."""
system_name = "ExistingSystem"
response = client.post(f"/op25/load_from_library?system_name={system_name}")
assert response.status_code == 200
assert response.json() == {"status": f"Loaded and started library config: {system_name}"}
# Verify activation and restart
mock_activate.assert_called_with(system_name)
assert mock_popen.call_count == 1
@patch("routers.op25_controller.activate_config_from_library", return_value=False)
def test_load_from_library_not_found(mock_activate):
"""Test loading a non-existent configuration from the library."""
system_name = "NotFoundSystem"
response = client.post(f"/op25/load_from_library?system_name={system_name}")
assert response.status_code == 404
assert "not found in library" in response.json()["detail"]
@patch("routers.op25_controller.save_config_to_library", return_value=True)
@patch("routers.op25_controller.save_library_sidecars")
def test_save_to_library(mock_save_sidecars, mock_save_lib):
"""Test saving a configuration directly to the library."""
system_name = "NewLibSystem"
response = client.post(f"/op25/save_to_library?system_name={system_name}", json=SAMPLE_P25_CONFIG)
assert response.status_code == 200
assert response.json() == {"status": f"Config saved as {system_name}"}
mock_save_lib.assert_called_with(system_name, ANY)
mock_save_sidecars.assert_called_with(system_name, ANY)
@patch("routers.op25_controller.scan_local_library", return_value=["System1.json", "System2.json"])
def test_get_library(mock_scan):
"""Test the /library endpoint."""
response = client.get("/op25/library")
assert response.status_code == 200
assert response.json() == ["System1.json", "System2.json"]
mock_scan.assert_called_once()
@patch("routers.op25_controller.build_op25_config", side_effect=Exception("Build failed"))
def test_set_active_config_build_failure(mock_build):
"""Test error handling when config building fails."""
response = client.post("/op25/set_active_config", json=SAMPLE_P25_CONFIG)
assert response.status_code == 500
assert "Configuration error: Build failed" in response.json()["detail"]