Merge pull request 'Init OP25 crash detection' (#20) from #16-op25-crash-detection into main

Reviewed-on: #20
2024-09-15 01:11:27 -04:00
5 changed files with 100 additions and 21 deletions


@@ -12,6 +12,7 @@
"type": "module",
"dependencies": {
"@discordjs/voice": "^0.17.0",
"axios": "^1.7.7",
"convert-units": "^2.3.4",
"discord.js": "^14.15.3",
"dotenv": "^16.4.5",


@@ -94,8 +94,8 @@ rm -rf /usr/lib/python3.11/EXTERNALLY-MANAGED
# Getting the Python DAB
echo "Installing PDAB and Dependencies"
git clone -b DRBv3 https://git.vpn.cusano.net/logan/Python-Discord-Audio-Bot.git ./discordAudioBot/pdab
pip3 install -r ./discordAudioBot/pdab/requirements.txt
git clone -b DRBv3 https://git.vpn.cusano.net/logan/Python-Discord-Audio-Bot.git ./pdab
pip3 install -r ./pdab/requirements.txt
# Create a systemd service file for the DRB Client
echo "Adding DRB Node service..."


@@ -25,7 +25,7 @@ let botCallback;
export const initDiscordBotClient = (clientId, callback, runPDAB = true) => {
botCallback = callback;
if (runPDAB) launchProcess("python", [join(__dirname, "./pdab/main.py"), process.env.AUDIO_DEVICE_ID, clientId, port], false, false, join(__dirname, "./pdab"));
if (runPDAB) launchProcess("python", [join(__dirname, "../../pdab/main.py"), process.env.AUDIO_DEVICE_ID, clientId, port], false, false, join(__dirname, "../../pdab"));
pdabProcess = true; // TODO - Make this more dynamic
}
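The new relative path assumes the PDAB checkout now sits two directories above this module, matching the relocated clone in the install script above. A hedged sketch of a pre-launch guard using only Node built-ins; launchProcess, clientId, port and AUDIO_DEVICE_ID come from the surrounding file, everything else is illustrative:

import { existsSync } from 'node:fs';
import { join, dirname } from 'node:path';
import { fileURLToPath } from 'node:url';

const __dirname = dirname(fileURLToPath(import.meta.url)); // the surrounding module already has its own __dirname

// Resolve ../../pdab relative to this module and fail loudly if the clone step was skipped.
const pdabDir = join(__dirname, '../../pdab');
const pdabEntry = join(pdabDir, 'main.py');
if (!existsSync(pdabEntry)) {
  throw new Error(`PDAB entry point missing at ${pdabEntry}; run the install script's ./pdab clone first`);
}
// launchProcess("python", [pdabEntry, process.env.AUDIO_DEVICE_ID, clientId, port], false, false, pdabDir);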


@@ -1,8 +1,8 @@
import { DebugBuilder } from "./debugger.mjs";
const log = new DebugBuilder("client", "selfUpdater");
import simpleGit from 'simple-git';
import { restartService } from './serviceHandler.mjs'
import { launchProcess } from './subprocessHandler.mjs'
import { restartService } from './serviceHandler.mjs';
import { launchProcess } from './subprocessHandler.mjs';
const git = simpleGit();
@@ -12,27 +12,31 @@ export const checkForUpdates = async () => {
// Fetch remote changes
await git.fetch();
// Get the latest commit hash
const latestCommitHash = await git.revparse(['@{u}']);
// Get the current branch
const currentBranch = await git.revparse(['--abbrev-ref', 'HEAD']);
log.INFO(`Current branch is ${currentBranch}`);
// Get the latest commit hash for the current branch
const latestCommitHash = await git.revparse([`${currentBranch}@{u}`]);
// Compare with the local commit hash
const localCommitHash = await git.revparse(['HEAD']);
if (latestCommitHash !== localCommitHash) {
log.INFO('An update is available. Updating...');
log.INFO(`An update is available on branch ${currentBranch}. Updating...`);
// Check if there have been any changes to the code
const gitStatus = await git.status()
const gitStatus = await git.status();
log.INFO(gitStatus);
if (gitStatus.modified.length > 0){
// There is locally modified code
log.INFO("There is locally modified code, resetting...");
log.INFO("There is locally modified code, stashing changes...");
await git.stash();
await git.reset('hard', ['origin/master']);
}
// Pull the latest changes from the remote repository
await git.pull();
// Ensure we are on the correct branch and pull the latest changes
await git.checkout(currentBranch);
await git.pull('origin', currentBranch);
// Run the post-update script
log.INFO('Running post-update script...');
@@ -42,10 +46,10 @@ export const checkForUpdates = async () => {
log.INFO('Update completed successfully. Restarting the application...');
restartApplication();
return true
return true;
} else {
log.INFO('The application is up to date.');
return false
return false;
}
} catch (error) {
log.ERROR('Error checking for updates:', error);
@@ -53,7 +57,7 @@ export const checkForUpdates = async () => {
}
// Function to restart the application
export const restartApplication = () => {
export const restartApplication = () => {
log.INFO('Restarting the application...');
restartService('discord-radio-bot');
}
};
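Distilled, the update check now resolves the upstream of whichever branch is checked out instead of assuming one. A minimal standalone sketch of that comparison with simple-git, leaving out the stash/pull/restart handling shown above:

import simpleGit from 'simple-git';

const git = simpleGit();
await git.fetch();                                            // refresh remote refs
const branch = await git.revparse(['--abbrev-ref', 'HEAD']);  // e.g. "main"
const upstream = await git.revparse([`${branch}@{u}`]);       // tip of that branch's upstream
const local = await git.revparse(['HEAD']);                   // local tip
console.log(upstream !== local ? 'update available' : 'up to date');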


@@ -3,11 +3,65 @@ const log = new DebugBuilder("client", "op25Handler");
import { P25ConfigGenerator, NBFMConfigGenerator } from './modules/op25ConfigGenerators.mjs';
import { getAllPresets } from '../modules/radioPresetHandler.mjs';
import { startService, stopService } from '../modules/serviceHandler.mjs';
import axios from 'axios'; // Import axios for HTTP requests
import dotenv from 'dotenv';
dotenv.config()
dotenv.config();
let currentSystem = undefined;
let crashDetectionInterval; // Variable to store the crash detection interval ID
// Sleep utility to add delays between retries
const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));
/**
* Checks the health of the OP25 web portal by making an HTTP GET request.
* If the portal does not respond or returns an error, the check is retried a set number of times.
* If all retry attempts fail, it restarts the OP25 service.
*
* @async
* @function checkServiceHealth
* @returns {Promise<void>} Resolves if the web portal is healthy or after the restart process is triggered.
* @throws Will log errors related to the health check or service restart.
*/
const checkServiceHealth = async () => {
try {
log.INFO("Checking OP25 web portal health...");
// Perform an HTTP GET request to the web portal with a 5-second timeout
await axios({ method: "get", url: 'http://localhost:8081', timeout: 5000 });
log.INFO("Web portal is healthy.");
} catch (error) {
if (error.code === 'ECONNABORTED') {
log.ERROR("Request timed out. The web portal took too long to respond.");
} else if (error.response) {
log.ERROR(`Web portal responded with status ${error.response.status}: ${error.response.statusText}`);
} else if (error.request) {
log.ERROR("No response received from web portal.");
} else {
log.ERROR(`Unexpected error occurred: ${error.message}`);
}
// Retry mechanism
const retryAttempts = 3;
const delayBetweenRetries = 3000; // 3-second delay between retries
for (let i = 1; i <= retryAttempts; i++) {
log.INFO(`Retrying to check web portal health... Attempt ${i}/${retryAttempts}`);
try {
await sleep(delayBetweenRetries); // Add delay before retrying
await axios({ method: "get", url: 'http://localhost:8081', timeout: 5000 });
log.INFO("Web portal is healthy on retry.");
return;
} catch (retryError) {
log.ERROR(`Retry ${i} failed: ${retryError.message}`);
if (i === retryAttempts) {
log.ERROR("All retry attempts failed. Restarting the service...");
await restartOp25();
}
}
}
}
};
/**
* Creates configuration based on the preset and restarts the OP25 service.
@@ -41,9 +95,7 @@ const createConfigAndRestartService = async (systemName, preset) => {
const op25ConfigPath = `${op25FilePath}${op25FilePath.endsWith('/') ? 'active.cfg.json' : '/active.cfg.json'}`;
await generator.exportToFile(op25ConfigPath);
// Restart the service
await stopService('op25-multi_rx');
await startService('op25-multi_rx');
await restartOp25();
};
/**
@@ -65,8 +117,23 @@ export const openOP25 = async (systemName) => {
}
await createConfigAndRestartService(systemName, preset);
// Start OP25 crash detection
if (!crashDetectionInterval) {
crashDetectionInterval = setInterval(checkServiceHealth, 30000); // Check every 30 seconds
log.INFO("Started crash detection.");
}
};
/**
* Restarts the OP25 service without changing the config.
* @returns {Promise<void>}
*/
export const restartOp25 = async () => {
// Restart the service
await stopService('op25-multi_rx');
await startService('op25-multi_rx');
}
/**
* Closes the OP25 service.
@@ -75,6 +142,13 @@ export const openOP25 = async (systemName) => {
export const closeOP25 = async () => {
currentSystem = undefined;
await stopService('op25-multi_rx');
// Stop crash detection
if (crashDetectionInterval) {
clearInterval(crashDetectionInterval);
crashDetectionInterval = null;
log.INFO("Stopped crash detection.");
}
};
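A minimal usage sketch of the lifecycle these changes add; the import path and system name are assumptions, the exports are the ones defined in this file:

import { openOP25, closeOP25 } from './op25Handler.mjs';

await openOP25('example-p25-system'); // writes active.cfg.json, restarts op25-multi_rx, starts the 30-second health checks
// ...while OP25 runs, checkServiceHealth() restarts the service if http://localhost:8081 stops answering...
await closeOP25();                    // stops op25-multi_rx and clears the crash-detection interval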
/**