Compare commits: d372bcd8af...main
18 Commits
| SHA1 |
|---|
| bf461bd5a9 |
| 64bee8c561 |
| b7147695ac |
| d7da3c636c |
| 68b5e5436c |
| 46ff0f49e8 |
| 8a417d9ed7 |
| 8ff82a462b |
| 1aea162b8f |
| edef8a4c7d |
| c8b90ca220 |
| ad09caac25 |
| 89d1cf3ee6 |
| 14693692d0 |
| a86e4b2876 |
| a7bcf971c4 |
| f840d754ac |
| cb8dfca8dd |
```diff
@@ -14,8 +14,8 @@ exports.RSSController = class RSSController {
     }

     async start(){
-        // Wait 30 seconds for the rest of the bot to start before starting rss feeds
-        await new Promise(resolve => setTimeout(resolve, 30000));
+        // Wait for the refresh period before starting rss feeds, so the rest of the bot can start
+        await new Promise(resolve => setTimeout(resolve, refreshInterval));

         log.INFO("Starting RSS Controller");
         // Get initial feeds before the starting the infinite loop
```
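This change swaps the fixed 30-second startup delay for the configured refresh period. A minimal standalone sketch of that pattern, assuming the interval comes from an environment variable (the diff does not show where `refreshInterval` is defined):

```js
// Minimal sketch of the configurable startup delay (env var name assumed, not from the diff).
const refreshInterval = Number(process.env.RSS_REFRESH_INTERVAL ?? 30000);

async function start() {
    // Wait one refresh period so the rest of the bot can finish starting first
    await new Promise(resolve => setTimeout(resolve, refreshInterval));
    console.log("Starting RSS Controller");
}

start();
```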
libCore.js (91 lines changed)
```diff
@@ -25,6 +25,52 @@ let parser = new Parser({
 var feedStorage = new FeedStorage();
 var postStorage = new PostStorage();

+// Initiate a running array of objects to keep track of sources that have no feeds/posts
+/*
+var runningPostsToRemove = [{
+    "{SOURCE URL}": {NUMBER OF TIMES IT'S BEEN REMOVED}
+}]
+*/
+var runningPostsToRemove = {};
+const sourceFailureLimit = process.env.SOURCE_FAILURE_LIMIT ?? 3;
+
+/**
+ *
+ * @param {*} sourceURL
+ */
+exports.removeSource = function removeSource(sourceURL) {
+    log.INFO("Removing source URL: ", sourceURL);
+    if (!sourceURL in runningPostsToRemove) {runningPostsToRemove[sourceURL] = 1; return;}
+
+    if (runningPostsToRemove[sourceURL] < sourceFailureLimit) {runningPostsToRemove[sourceURL] += 1; return;}
+
+    feedStorage.getRecordBy('link', sourceURL, (err, record) => {
+        if (err) log.ERROR("Error getting record from feedStorage", err);
+
+        if (!record) log.ERROR("No source returned from feedStorage");
+        feedStorage.destroy(record.id, (err, results) => {
+            if (err) log.ERROR("Error removing ID from results", err);
+
+            if (!results) log.WARN("No results from remove entry");
+
+            log.DEBUG("Source exceeded the limit of retries and has been removed", sourceURL);
+            return;
+        })
+    })
+}
+
+/**
+ * Unset a source URL from deletion if the source has not already been deleted
+ * @param {*} sourceURL The source URL to be unset from deletion
+ * @returns {*}
+ */
+exports.unsetRemoveSource = function unsetRemoveSource(sourceURL) {
+    log.INFO("Unsetting source URL from deletion (if not already deleted): ", sourceURL);
+    if (!sourceURL in runningPostsToRemove) return;
+
+    if (runningPostsToRemove[sourceURL] > sourceFailureLimit) return delete runningPostsToRemove[sourceURL];
+}
+
 /**
  * Adds or updates new source url to configured storage
  * @constructor
```
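The new `removeSource`/`unsetRemoveSource` pair implements a consecutive-failure counter: a source is only deleted once it has failed `sourceFailureLimit` times, and a successful parse clears it. A self-contained sketch of that counting idea, with the storage calls stubbed out (names assumed for illustration):

```js
// Sketch of the failure counting behind removeSource/unsetRemoveSource (storage calls stubbed).
const sourceFailureLimit = Number(process.env.SOURCE_FAILURE_LIMIT ?? 3);
const failures = {}; // sourceURL -> consecutive failure count

function recordFailure(sourceURL, onRemove) {
    failures[sourceURL] = (failures[sourceURL] ?? 0) + 1;
    if (failures[sourceURL] >= sourceFailureLimit) {
        onRemove(sourceURL); // only now is the source actually deleted from storage
        delete failures[sourceURL];
    }
}

function recordSuccess(sourceURL) {
    // A good parse resets the counter, so a transient outage does not delete the feed
    delete failures[sourceURL];
}

// Example:
recordFailure("https://example.com/feed", url => console.log("would remove", url));
recordSuccess("https://example.com/feed");
```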
```diff
@@ -105,29 +151,31 @@ exports.updateFeeds = (client) => {
     tempFeedStorage.getAllRecords(async (err, records) => {
         // Load the posts from each RSS source
         for (const source of records) {
             sourcePromiseArray.push(new Promise((resolve, reject) => {
                 log.DEBUG('Record title: ', source.title);
                 log.DEBUG('Record link: ', source.link);
                 log.DEBUG('Record category: ', source.category);
                 log.DEBUG('Record guild ID: ', source.guild_id);
                 log.DEBUG('Record channel ID: ', source.channel_id);
                 // Parse the RSS feed
                 parser.parseURL(source.link, async (err, parsedFeed) => {
                     if (err) {
-                        log.ERROR("Parser Error: ", source, err);
+                        log.ERROR("Parser Error: ", runningPostsToRemove, source, err);
+                        // Call the wrapper to make sure the site isn't just down at the time it checks and is back up the next time
+                        this.removeSource(source.link);
                         reject;
                     }
                     try {
                         log.DEBUG("Parsed Feed Keys", Object.keys(parsedFeed), parsedFeed?.title);
                         if (parsedFeed?.items){
-                            for (const post of parsedFeed.items){
+                            this.unsetRemoveSource(source.link);
+                            for (const post of parsedFeed.items.reverse()){
                                 recordPromiseArray.push(new Promise((recordResolve, recordReject) => {
                                     log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
                                     log.VERBOSE("Post from feed: ", post);
-                                    if (!post.title || !post.link || !post.pubDate) return recordReject("Missing information from the post");
+                                    if (!post.title || !post.link) return recordReject("Missing information from the post");
                                     if (!post.content || !post['content:encoded']) log.WARN("There is no content for post: ", post.title);

-                                    post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate);
+                                    post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate ?? Date.now());
                                     tempPostStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
                                         if (err) throw err;
```
```diff
@@ -140,7 +188,7 @@ exports.updateFeeds = (client) => {

                                         if (!sendResults) {
                                             log.ERROR("No sending results from sending a post: ", sendResults, existingRecord, post);
-                                            return recordReject();
+                                            return recordReject("No sending results from sending a post");
                                         }

                                         log.DEBUG("Saving post to database: ", sendResults, post.title, source.channel_id);
```
```diff
@@ -150,22 +198,26 @@ exports.updateFeeds = (client) => {

                                         if (saveResults) {
                                             log.DEBUG("Saved results: ", saveResults);
-                                            return recordResolve();
+                                            return recordResolve("Saved results", saveResults);
                                         }
                                     });
                                 })
                             })
                         });
                     })
                 })
             }))
                                 }
                             }
                         }
                         }
+                        else {
+                            this.removeSource(source.link);
+                        }
                     }
                     catch (err) {
                         log.ERROR("Error Parsing Feed: ", source.link, err);
+                        this.removeSource(source.link);
                         throw err;
                     }
                     Promise.all(recordPromiseArray).then((values) => {
                         log.DEBUG("All posts finished for: ", source.title, values);
-                        return resolve();
+                        return resolve(source.title);
                     });
                 });
             }))
```
```diff
@@ -174,9 +226,8 @@ exports.updateFeeds = (client) => {
         // Wait for all connections to finish then close the temp connections

         Promise.all(sourcePromiseArray).then((values) => {
-            log.DEBUG("Closing temp connections: ", values);
-            tempFeedStorage.closeConnection();
-            tempPostStorage.closeConnection();
+            log.DEBUG("All sources finished, closing temp connections: ", values);
+            tempConnection.end();
         });
     });
 }
```
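Across these hunks, `updateFeeds` wraps each source in a promise pushed onto `sourcePromiseArray` and each post in a promise pushed onto `recordPromiseArray`, then closes the temporary connection only after every source promise settles. A stripped-down sketch of that orchestration, with parsing and storage replaced by placeholders:

```js
// Stripped-down sketch of the per-source / per-post promise layout in updateFeeds.
async function updateFeeds(sources) {
    const sourcePromiseArray = [];

    for (const source of sources) {
        sourcePromiseArray.push(new Promise((resolve) => {
            const recordPromiseArray = [];

            // One promise per post; the real code parses the feed and stores each post here.
            for (const post of source.posts) {
                recordPromiseArray.push(Promise.resolve(post.title));
            }

            Promise.all(recordPromiseArray).then(values => {
                console.log("All posts finished for:", source.title, values);
                resolve(source.title); // resolve with the source title, as in the diff
            });
        }));
    }

    const values = await Promise.all(sourcePromiseArray);
    console.log("All sources finished, closing temp connections:", values);
    // tempConnection.end() happens at this point in the real code
}

updateFeeds([{ title: "Example Feed", posts: [{ title: "Post 1" }, { title: "Post 2" }] }]);
```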
```diff
@@ -25,17 +25,41 @@ var Connection = mysql.createPool({
 });

 // Helper Functions
-// Function to run and handle SQL errors
+/**
+ * Function to run and handle SQL errors
+ * @param {string} sqlQuery The SQL query string
+ * @param {*} connection The SQL connection to be used to query
+ * @param {function} callback The callback function to be called with an error or the results
+ * @param {number} _retry Set by error retry, increments the number a query has been retried to increase wait time and track a specific query
+ */
 function runSQL(sqlQuery, connection, callback = (err, rows) => {
     log.ERROR(err);
     throw err;
-}) {
+}, _retry = 0) {
     // Start the MySQL Connection
     if (!connection) connection = Connection;
     connection.query(sqlQuery, (err, rows) => {
         if (err) {
             log.ERROR("SQL Error on query:", sqlQuery, err);
-            return callback(err, undefined);
+            if (err.code === "EHOSTUNREACH") {
+                // DB Connection is unavailable
+                let retryTimeout;
+                switch(_retry){
+                    case 0:
+                        retryTimeout = 30000;
+                        break;
+                    case retry < 15:
+                        retryTimeout = 30000 + retry * 15000;
+                        break;
+                    default:
+                        log.ERROR("Retried Database 15 times over, please check connection status and restart the app", sqlQuery, err);
+                        return callback(err, undefined);
+                }
+                log.WARN(`Database connection is unavailable, waiting ${ retryTimeout / 1000 } seconds...`);
+                _retry += 1
+                // Wait for the retry timeout before trying the query again
+                setTimeout(runSQL(sqlQuery, connection, callback, _retry));
+            }
+            else return callback(err, undefined);
         }
         log.VERBOSE(`SQL result for query '${sqlQuery}':`, rows);
         return callback(undefined, rows);
```
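For reference, a minimal standalone sketch of the retry-with-backoff idea behind the new `runSQL`, assuming the `mysql` package's pool API and the same `EHOSTUNREACH` check; in this sketch the delay is passed to `setTimeout` as a timeout and the retry counter drives the backoff (placeholder credentials):

```js
const mysql = require('mysql');

// Placeholder credentials for illustration only.
const pool = mysql.createPool({ host: 'localhost', user: 'bot', password: 'secret', database: 'rss' });

// Sketch: run a query, retrying with a growing delay while the DB host is unreachable.
function runSQL(sqlQuery, connection = pool, callback = (err) => { if (err) throw err; }, _retry = 0) {
    connection.query(sqlQuery, (err, rows) => {
        if (err) {
            if (err.code === 'EHOSTUNREACH' && _retry < 15) {
                const retryTimeout = 30000 + _retry * 15000;
                console.warn(`Database unreachable, retrying in ${retryTimeout / 1000}s...`);
                // Schedule the retry after the timeout, passing the incremented counter along
                return setTimeout(() => runSQL(sqlQuery, connection, callback, _retry + 1), retryTimeout);
            }
            return callback(err, undefined);
        }
        return callback(undefined, rows);
    });
}

runSQL('SELECT 1', pool, (err, rows) => { if (!err) console.log(rows); });
```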
libUtils.js (81 lines changed)
```diff
@@ -75,20 +75,26 @@ exports.onError = (error) => {
 }

 exports.sendPost = (post, source, channel, callback) => {
     log.DEBUG("Sending post from source: ", post, source);
-    // Reset the content parameter with the encoded parameter
-    post.content = parse(post['content:encoded'] ?? post.content);
-    const postTitle = post.title;
+    const postTitle = String(post.title).substring(0, 150);
     const postLink = post.link;
-    // Get the post content and trim it to length or add a placeholder if necessary
-    var postText = String(post.content.text);
-    if (postText.length >= 300) postText = `${postText.slice(0, 300).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))}...`;
-    else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
-    var postContent = postText;
+    let postContent;
+
+    if (post.content) {
+        // Reset the content parameter with the encoded parameter
+        post.content = parse(post['content:encoded'] ?? post.content);
+        // Get the post content and trim it to length or add a placeholder if necessary
+        var postText = String(post.content.text);
+        if (postText.length >= 3800) postText = `${postText.slice(0, 3800).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))} [...](${post.link})`;
+        else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
+        postContent = postText;
+    }
+    else postContent = `*This post has no content* [Direct Link](${post.link})`;

     // Check for embedded youtube videos and add the first four as links
     const ytVideos = String(post.content).match(youtubeVideoRegex);
     if (ytVideos) {
-        for (const ytVideo of ytVideos.slice(0,4)){
+        for (var ytVideo of ytVideos.slice(0,4)){
+            // If the video is an embed, replace the embed to make it watchable
+            if (ytVideo.includes("embed")) ytVideo = ytVideo.replace("embed/", "watch?v=");
             postContent += `\nEmbeded Video from Post: [YouTube](${ytVideo})`
```
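The reworked `sendPost` only parses `post.content` when it exists and trims the text near 3800 characters at a word boundary, appending a markdown link back to the post. A small sketch of that trimming step on a plain string (limit and link format taken from the diff; the HTML parsing is left out):

```js
// Sketch: trim text at the last space before the limit and append a markdown "read more" link.
function trimPostText(text, link, limit = 3800) {
    if (!text || text.length === 0) return `*This post has no content* [Direct Link](${link})`;
    if (text.length < limit) return text;
    const cut = text.slice(0, limit);
    return `${cut.substring(0, cut.lastIndexOf(" "))} [...](${link})`;
}

console.log(trimPostText("word ".repeat(30), "https://example.com/post", 100));
console.log(trimPostText("", "https://example.com/post"));
```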
```diff
@@ -97,23 +103,27 @@ exports.sendPost = (post, source, channel, callback) => {
     log.DEBUG("Post content: ", postContent);

     const postId = post.postId;
-    const postPubDate = new Date(post.pubDate).toISOString() ?? new Date().toISOString();
+    if (!post.pubDate) post.pubDate = Date.now();
+    const postPubDate = new Date(post.pubDate).toISOString();

     var postSourceLink = source.title;
     var postImage = post.image ?? undefined;

     if (!postImage){
-        const linksInPost = post.content.querySelectorAll("a");
-        if (linksInPost) {
-            log.DEBUG("Found links in post:", linksInPost);
-            for (const link of linksInPost) {
-                // Check to see if this link is a youtube video that was already found, if so skip it
-                if (ytVideos?.includes(link)) continue;
-                const images = String(link.getAttribute("href")).match(imageRegex);
-                log.DEBUG("Images found in post:", images);
-                if (images) {
-                    postImage = images[0];
-                }
-            }
+        if (post.content){
+            const linksInPost = post.content.querySelectorAll("a");
+            if (linksInPost) {
+                log.DEBUG("Found links in post:", linksInPost);
+                for (const link of linksInPost) {
+                    // Check to see if this link is a youtube video that was already found, if so skip it
+                    if (ytVideos?.includes(link)) continue;
+                    const images = String(link.getAttribute("href")).match(imageRegex);
+                    log.DEBUG("Images found in post:", images);
+                    if (images) {
+                        postImage = images[0];
+                    }
+                }
+            }
         }
     }
```
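When a post carries no explicit image, the code walks the parsed content's `<a>` elements and keeps the first `href` that matches an image pattern. A rough sketch of that selection, assuming `node-html-parser` for the `parse` call and an illustrative `imageRegex` (the repository's actual regex is not shown in this compare):

```js
const { parse } = require('node-html-parser');

// Illustrative pattern only; the repository's imageRegex is defined elsewhere.
const imageRegex = /https?:\/\/\S+\.(?:png|jpe?g|gif|webp)/i;

// Sketch: return the first image-looking href from the post's links, if any.
function firstImageFromLinks(html) {
    const root = parse(html);
    for (const link of root.querySelectorAll('a')) {
        const images = String(link.getAttribute('href')).match(imageRegex);
        if (images) return images[0];
    }
    return undefined;
}

console.log(firstImageFromLinks('<p><a href="https://example.com/pic.jpg">picture</a></p>'));
```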
```diff
@@ -123,15 +133,18 @@ exports.sendPost = (post, source, channel, callback) => {
         .setColor(0x0099FF)
         .setTitle(postTitle)
         .setURL(postLink)
-        .addFields({ name: "Post Content", value: postContent, inline: false })
-        .addFields({ name: 'Published', value: postPubDate, inline: true })
-        .addFields({ name: 'Source', value: postSourceLink, inline: true });
+        .addFields({ name: 'Source', value: postSourceLink, inline: true })
+        .addFields({ name: 'Published', value: postPubDate, inline: true });

     // TODO - If there is more than one image, create a canvas and post the created canvas
     if (postImage) {
         log.DEBUG("Image from post:", postImage);
         rssMessage.setImage(postImage);
     }

+    //Add the main content if it's present
+    postContent = postContent.slice(0, 4090).trim();
+    if (postContent) rssMessage.setDescription( postContent );
+
     channel.send({ embeds: [rssMessage] });
```
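The embed now lists Source before Published and carries the body in the description, sliced to 4090 characters (Discord caps embed descriptions at 4096). A compact sketch of that construction, assuming discord.js v14's `EmbedBuilder` (the builder's creation sits outside this hunk):

```js
const { EmbedBuilder } = require('discord.js');

// Sketch: build the RSS embed the way the updated sendPost lays it out.
function buildRssEmbed({ title, link, sourceTitle, pubDate, content, image }) {
    const rssMessage = new EmbedBuilder()
        .setColor(0x0099FF)
        .setTitle(String(title).substring(0, 150))
        .setURL(link)
        .addFields({ name: 'Source', value: sourceTitle, inline: true })
        .addFields({ name: 'Published', value: pubDate, inline: true });

    if (image) rssMessage.setImage(image);

    // Discord embed descriptions max out at 4096 characters; the diff slices to 4090.
    const description = String(content ?? '').slice(0, 4090).trim();
    if (description) rssMessage.setDescription(description);

    return rssMessage;
}

// Usage: channel.send({ embeds: [buildRssEmbed(postFields)] });
```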
```diff
@@ -140,11 +153,23 @@ exports.sendPost = (post, source, channel, callback) => {
         return callback(undefined, true);
     }
     catch (err){
-        log.ERROR("Error sending message: ", err);
+        log.ERROR("Error sending message: ", postTitle, postId, postContent, postPubDate, err);
         return callback(err, undefined);
     }
 }

 exports.returnHash = (...stringsIncluded) => {
     return crypto.createHash('sha1').update(`${stringsIncluded.join("-<<??//\\\\??>>-")}`).digest("base64");
 }
+
+/**
+ * Check if a key exists in an array of objects
+ * @param {*} key The key to search for
+ * @param {*} array The object to search for the key
+ * @returns {boolean} If the key exists in the object
+ */
+exports.checkForKeyInArrayOfObjects = (key, array) => {
+    return array.filter(function (o) {
+        return o.hasOwnProperty(key);
+    }).length > 0;
+}
```
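A short usage example for the new `checkForKeyInArrayOfObjects` helper (the require path and object shapes are assumed for illustration):

```js
// Assumed require path for illustration.
const { checkForKeyInArrayOfObjects } = require('./libUtils');

const sources = [{ link: 'https://example.com/feed' }, { title: 'Example' }];

console.log(checkForKeyInArrayOfObjects('link', sources)); // true  - at least one object has "link"
console.log(checkForKeyInArrayOfObjects('guid', sources)); // false - no object has "guid"
```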
update.sh (new executable file, 15 lines)
```diff
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Stating Message
+echo "<!-- UPDATING ---!>"
+
+# TODO - Add an updater for Stable Diffusion API
+
+# Update the git Repo
+git fetch -a -p
+git pull
+
+# Install any new libraries
+npm i
+
+# Update complete message
+echo "<!--- UPDATE COMPLETE! ---!>"
```
```diff
@@ -18,7 +18,7 @@ exports.DebugBuilder = class DebugBuilder {
         this.ERROR = (...messageParts) => {
             const error = debug(`${appName}:${fileName}:ERROR`);
             error(messageParts);
-            if (process.env.EXIT_ON_ERROR) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
+            if (process.env.EXIT_ON_ERROR && process.env.EXIT_ON_ERROR > 0) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
         }
     }
 }
```
```diff
@@ -2,7 +2,7 @@ const mysql = require('mysql');
 const databaseConfig = require('../config/databaseConfig');
 const utils = require('./utils');

-const connection = mysql.createConnection({
+const connection = mysql.createPool({
     host: databaseConfig.database_host,
     user: databaseConfig.database_user,
     password: databaseConfig.database_password,
@@ -11,8 +11,6 @@ const connection = mysql.createConnection({

 const nodesTable = `${databaseConfig.database_database}.nodes`;

-connection.connect()
-
 /** Get all nodes the server knows about regardless of status
  * @param {*} callback Callback function
  */
```
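This last change swaps `mysql.createConnection` for `mysql.createPool` and drops the explicit `connection.connect()` call; with the `mysql` package a pool hands out connections lazily, so `pool.query` can be called directly. A minimal sketch with placeholder credentials:

```js
const mysql = require('mysql');

// A pool manages its own connections; no connect() call is needed before querying.
const pool = mysql.createPool({
    host: 'localhost',
    user: 'bot',
    password: 'secret',
    database: 'rssbot',
});

pool.query('SELECT 1 AS ok', (err, rows) => {
    if (err) throw err;
    console.log(rows);
    pool.end(); // release all pooled connections when shutting down
});
```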