25 Commits

Author SHA1 Message Date
Logan Cusano
bf461bd5a9 Fixed typo in MySQL error handling 2023-05-03 22:29:46 -04:00
Logan Cusano
64bee8c561 Reverse the order of RSS feeds so the newest is sent last 2023-04-01 21:08:47 -04:00
Logan Cusano
b7147695ac Fixing declaration bug and updating comments 2023-04-01 16:35:24 -04:00
Logan Cusano
d7da3c636c Update SQL helper function to retry the database if it's unreachable 2023-04-01 16:28:52 -04:00
Logan Cusano
68b5e5436c Using the correct variable if the post is empty 2023-03-31 20:36:25 -04:00
Logan Cusano
46ff0f49e8 Add default text if the post.content variable is empty 2023-03-31 20:20:32 -04:00
Logan Cusano
8a417d9ed7 Fixing bug in message sending 2023-03-30 23:03:56 -04:00
Logan Cusano
8ff82a462b Trim whitespace in RSS messages 2023-03-30 19:58:31 -04:00
Logan Cusano
1aea162b8f Update RSS messages to send the content in the description 2023-03-29 20:54:24 -04:00
Logan Cusano
edef8a4c7d Error handling different RSS feeds 2023-03-17 23:27:12 -04:00
Logan Cusano
c8b90ca220 Bugfix for potential #2 fix 2023-03-13 23:38:36 -04:00
Logan Cusano
ad09caac25 Changing only other Mysql connection to pool
- #2
2023-03-13 23:32:58 -04:00
Logan Cusano
89d1cf3ee6 Updated permissions on updater bash script 2023-03-13 00:14:50 -04:00
Logan Cusano
14693692d0 Adding updater script 2023-03-13 00:13:25 -04:00
Logan Cusano
a86e4b2876 Wait for the refresh period before starting RSS feeds 2023-03-13 00:07:07 -04:00
Logan Cusano
a7bcf971c4 Fixed bug when sending DALL-E images 2023-03-13 00:06:39 -04:00
Logan Cusano
f840d754ac Merge branch 'main' of git.vpn.cusano.net:logan/Emmelia-Link-Flayer-Rewrite 2023-03-12 15:54:52 -04:00
Logan Cusano
cb8dfca8dd Return the source title from record promises 2023-03-12 15:54:22 -04:00
d372bcd8af Merge pull request 'feature/merge-multiple-images' (#7) from feature/merge-multiple-images into main
Images will now have their own embeds
2023-03-12 04:39:35 -04:00
Logan Cusano
cdb766520d Added multiple embed builders for multiple images 2023-03-12 04:23:17 -04:00
fcf61f3958 Merge pull request 'merge production into feature branch' (#4) from main into feature/merge-multiple-images
Reviewed-on: #4
2023-03-12 04:01:51 -04:00
Logan Cusano
af74c7b90d Remove unused module 2023-03-12 03:59:01 -04:00
Logan Cusano
ffacd19883 Potential fix for #2 2023-03-12 03:47:49 -04:00
Logan Cusano
9e2814cb2c Init branch, WIP
Needs new library to merge images
2023-03-11 23:07:48 -05:00
Logan Cusano
72134b1b7b Working on #2 2023-03-11 23:05:50 -05:00
10 changed files with 324 additions and 125 deletions

View File

@@ -1,10 +1,10 @@
const { submitImagePromptTransaction } = require("../controllers/openAiController");
const { submitImagePromptTransaction, DALLE_COLOR } = require("../controllers/openAiController");
const { SlashCommandBuilder } = require('discord.js');
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "imagine");
const { EmmeliaEmbedBuilder } = require('../libUtils');
const COST_OF_COMMAND = 800
const COST_OF_COMMAND = 800;
module.exports = {
data: new SlashCommandBuilder()
@@ -45,21 +45,31 @@ module.exports = {
submitImagePromptTransaction(promptText, discordAccountId, images, size, interaction, this, async (err, imageResults) => {
if (err) throw err;
var dalleEmbeds = [];
log.DEBUG("Image Results: ", imageResults)
const dalleEmbed = new EmmeliaEmbedBuilder()
.setColor(0x0099FF)
// Add the information post
dalleEmbeds.push(new EmmeliaEmbedBuilder()
.setColor(DALLE_COLOR)
.setTitle(`New Image Result`)
.setDescription(`${interaction.member.user} sent the prompt: '${promptText}'`)
.addFields({ name: 'Tokens Used', value: `${imageResults.totalTokens}`, inline: true })
const imagesInResult = Array(imageResults.results.data).length
);
// Add the images to the result
const imagesInResult = Array(imageResults.results).length
log.DEBUG("Images in the result: ", imagesInResult);
if (imagesInResult == 1) dalleEmbed.setImage(imageResults.results.data[0].url);
await interaction.editReply({ embeds: [dalleEmbed], ephemeral: false });
if (imagesInResult >= 1) {
for (const imageData of imageResults.results.data){
const imageUrl = imageData.url;
dalleEmbeds.push(new EmmeliaEmbedBuilder().setURL(imageUrl).setImage(imageUrl).setColor(DALLE_COLOR));
}
}
// Add the information post
dalleEmbeds.push(new EmmeliaEmbedBuilder()
.setColor(DALLE_COLOR)
.addFields({ name: 'Tokens Used', value: `${imageResults.totalTokens}`, inline: true })
.addFields({ name: 'Images Generated', value: `${imagesInResult}`, inline: true })
.addFields({ name: 'Image Size Requested', value: `${imagesInResult}`, inline: true })
);
await interaction.editReply({ embeds: dalleEmbeds, ephemeral: false });
});
// Needs reply code to reply to the generation
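For reference, a minimal standalone sketch of the multi-embed pattern above, assuming discord.js v14's EmbedBuilder and an OpenAI-style image payload shaped like { results: { data: [{ url }] }, totalTokens } as in the diff; buildDalleEmbeds and its defaults are illustrative names, not the project's helpers:

    const { EmbedBuilder } = require('discord.js');

    // Build one info embed plus one image embed per generated URL.
    function buildDalleEmbeds(promptText, imageResults, color = 0x34c6eb) {
        const imageData = imageResults.results.data ?? [];
        const embeds = [
            new EmbedBuilder()
                .setColor(color)
                .setTitle('New Image Result')
                .setDescription(`Prompt: '${promptText}'`)
                .addFields({ name: 'Tokens Used', value: String(imageResults.totalTokens), inline: true })
                .addFields({ name: 'Images Generated', value: String(imageData.length), inline: true }),
        ];
        for (const { url } of imageData) {
            // Each image gets its own embed so every result is rendered.
            embeds.push(new EmbedBuilder().setColor(color).setURL(url).setImage(url));
        }
        return embeds;
    }

    // Usage inside the interaction handler (Discord allows up to 10 embeds per message):
    // await interaction.editReply({ embeds: buildDalleEmbeds(promptText, imageResults) });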

View File

@@ -14,6 +14,10 @@ const configuration = new Configuration({
const openai = new OpenAIApi(configuration);
// Global Vars for Other functions
exports.DALLE_COLOR = 0x34c6eb;
exports.CHATGPT_COLOR = 0x34eb9b;
async function getImageGeneration(_prompt, { _images_to_generate = 1, _image_size = "256x256" }, callback){
const validImageSizes = ["256x256", "512x512", "1024x1024"];
@@ -140,7 +144,7 @@ exports.submitTextPromptTransaction = async (prompt, temperature, max_tokens, di
}
if (!images_to_generate) images_to_generate = 1;
if (!image_size) images_to_generate = "256x256";
if (!image_size) image_size = "256x256";
totalTokensToBeUsed = pricePerImage * images_to_generate;

View File

@@ -14,8 +14,8 @@ exports.RSSController = class RSSController {
}
async start(){
// Wait 30 seconds for the rest of the bot to start before starting rss feeds
await new Promise(resolve => setTimeout(resolve, 30000));
// Wait for the refresh period before starting rss feeds, so the rest of the bot can start
await new Promise(resolve => setTimeout(resolve, refreshInterval));
log.INFO("Starting RSS Controller");
// Get initial feeds before starting the infinite loop
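A minimal sketch of the delayed start, assuming refreshInterval is a millisecond value read from configuration inside an async start() method; RSS_REFRESH_INTERVAL is an illustrative environment variable name, not one taken from the repository:

    // Pause for one refresh period before the first poll.
    const refreshInterval = Number(process.env.RSS_REFRESH_INTERVAL ?? 30000);
    const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
    await sleep(refreshInterval); // give the rest of the bot time to finish starting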

View File

@@ -5,6 +5,7 @@ const { FeedStorage, PostStorage } = require("./libStorage");
const libUtils = require("./libUtils");
const { DebugBuilder } = require("./utilities/debugBuilder");
const log = new DebugBuilder("server", "libCore");
const mysql = require("mysql");
const UserAgent = require("user-agents");
process.env.USER_AGENT_STRING = new UserAgent({ platform: 'Win32' }).toString();
@@ -24,6 +25,52 @@ let parser = new Parser({
var feedStorage = new FeedStorage();
var postStorage = new PostStorage();
// Initiate a running array of objects to keep track of sources that have no feeds/posts
/*
var runningPostsToRemove = [{
"{SOURCE URL}": {NUMBER OF TIMES IT'S BEEN REMOVED}
}]
*/
var runningPostsToRemove = {};
const sourceFailureLimit = process.env.SOURCE_FAILURE_LIMIT ?? 3;
/**
*
* @param {*} sourceURL
*/
exports.removeSource = function removeSource(sourceURL) {
log.INFO("Removing source URL: ", sourceURL);
if (!sourceURL in runningPostsToRemove) {runningPostsToRemove[sourceURL] = 1; return;}
if (runningPostsToRemove[sourceURL] < sourceFailureLimit) {runningPostsToRemove[sourceURL] += 1; return;}
feedStorage.getRecordBy('link', sourceURL, (err, record) => {
if (err) log.ERROR("Error getting record from feedStorage", err);
if (!record) log.ERROR("No source returned from feedStorage");
feedStorage.destroy(record.id, (err, results) => {
if (err) log.ERROR("Error removing ID from results", err);
if (!results) log.WARN("No results from remove entry");
log.DEBUG("Source exceeded the limit of retries and has been removed", sourceURL);
return;
})
})
}
/**
* Unset a source URL from deletion if the source has not already been deleted
* @param {*} sourceURL The source URL to be unset from deletion
* @returns {*}
*/
exports.unsetRemoveSource = function unsetRemoveSource(sourceURL) {
log.INFO("Unsetting source URL from deletion (if not already deleted): ", sourceURL);
if (!sourceURL in runningPostsToRemove) return;
if (runningPostsToRemove[sourceURL] > sourceFailureLimit) return delete runningPostsToRemove[sourceURL];
}
/**
* Adds or updates new source url to configured storage
* @constructor
@@ -83,63 +130,105 @@ exports.deleteSource = function (title, callback) {
/**
* Update channels with new posts from sources
*/
exports.updateFeeds = async (client) => {
exports.updateFeeds = (client) => {
if (!client) throw new Error("Client object not passed");
feedStorage.getAllRecords(async (err, records) => {
// Create a temp pool to use for all connections while updating the feed
var tempConnection = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME,
connectionLimit: 10
});
const tempFeedStorage = new FeedStorage(tempConnection);
const tempPostStorage = new PostStorage(tempConnection);
// Array of promises to wait on before closing the connection
var recordPromiseArray = [];
var sourcePromiseArray = [];
tempFeedStorage.getAllRecords(async (err, records) => {
// Load the posts from each RSS source
for (const source of records) {
log.DEBUG('Record title: ', source.title);
log.DEBUG('Record link: ', source.link);
log.DEBUG('Record category: ', source.category);
log.DEBUG('Record guild ID: ', source.guild_id);
log.DEBUG('Record channel ID: ', source.channel_id);
sourcePromiseArray.push(new Promise((resolve, reject) => {
log.DEBUG('Record title: ', source.title);
log.DEBUG('Record link: ', source.link);
log.DEBUG('Record category: ', source.category);
log.DEBUG('Record guild ID: ', source.guild_id);
log.DEBUG('Record channel ID: ', source.channel_id);
// Parse the RSS feed
parser.parseURL(source.link, async (err, parsedFeed) => {
if (err) {
log.ERROR("Parser Error: ", runningPostsToRemove, source, err);
// Call the wrapper to make sure the site isn't just down at the time it checks and is back up the next time
this.removeSource(source.link);
reject;
}
try {
if (parsedFeed?.items){
this.unsetRemoveSource(source.link);
for (const post of parsedFeed.items.reverse()){
recordPromiseArray.push(new Promise((recordResolve, recordReject) => {
log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
log.VERBOSE("Post from feed: ", post);
if (!post.title || !post.link) return recordReject("Missing information from the post");
if (!post.content || !post['content:encoded']) log.WARN("There is no content for post: ", post.title);
await parser.parseURL(source.link, async (err, parsedFeed) => {
if (err) {
log.ERROR("Parser Error: ", source, err);
return;
}
post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate ?? Date.now());
tempPostStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
if (err) throw err;
log.DEBUG("Existing post record: ", existingRecord);
if (existingRecord) return recordResolve("Existing record found for this post");
try{
log.DEBUG("Parsed Feed Keys", Object.keys(parsedFeed), parsedFeed?.title);
if (parsedFeed?.items){
for (const post of parsedFeed.items){
log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
//log.VERBOSE("Post from feed: ", post);
if (post.title && post.link && post.content && ( post.postId || post.guid || post.id ) && post.pubDate){
post.postId = post.postId ?? post.guid ?? post.id;
postStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
if (err) throw err;
log.DEBUG("Existing post record: ", existingRecord);
if (!existingRecord){
const channel = client.channels.cache.get(source.channel_id);
libUtils.sendPost(post, source, channel, (err, sendResults) =>{
if (err) throw err;
if (sendResults){
log.DEBUG("Saving post to database: ", sendResults, post.title, source.channel_id);
postStorage.savePost(post, (err, saveResults) => {
if(err) throw err;
if (saveResults) {
log.DEBUG("Saved results: ", saveResults);
return;
}
});
if (!sendResults) {
log.ERROR("No sending results from sending a post: ", sendResults, existingRecord, post);
return recordReject("No sending results from sending a post");
}
log.DEBUG("Saving post to database: ", sendResults, post.title, source.channel_id);
tempPostStorage.savePost(post, (err, saveResults) => {
if(err) throw err;
if (saveResults) {
log.DEBUG("Saved results: ", saveResults);
return recordResolve("Saved results", saveResults);
}
});
})
}
})
})
}))
}
}
else {
this.removeSource(source.link);
}
}
}catch (err) {
log.ERROR("Error Parsing Feed: ", source.link, err);
throw err;
}
});
catch (err) {
log.ERROR("Error Parsing Feed: ", source.link, err);
this.removeSource(source.link);
throw err;
}
Promise.all(recordPromiseArray).then((values) => {
log.DEBUG("All posts finished for: ", source.title, values);
return resolve(source.title);
});
});
}))
}
// Wait for all connections to finish then close the temp connections
Promise.all(sourcePromiseArray).then((values) => {
log.DEBUG("All sources finished, closing temp connections: ", values);
tempConnection.end();
});
});
}
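A condensed sketch of the failure-count pattern above: parse failures are tallied per source URL and the feed is only deleted once the limit is exceeded. The membership test is written as !(url in counts) so the negation applies to the in check; recordFailure, recordSuccess, and destroySource are illustrative names standing in for the removeSource/unsetRemoveSource/feedStorage pieces in the diff:

    // Tally consecutive failures per source URL; delete the feed only past the limit.
    const failureCounts = {}; // { "<source url>": <consecutive failure count> }
    const failureLimit = Number(process.env.SOURCE_FAILURE_LIMIT ?? 3);

    function recordFailure(sourceURL, destroySource) {
        if (!(sourceURL in failureCounts)) { // parentheses keep the negation on the membership test
            failureCounts[sourceURL] = 1;
            return;
        }
        if (failureCounts[sourceURL] < failureLimit) {
            failureCounts[sourceURL] += 1;
            return;
        }
        destroySource(sourceURL); // e.g. look the record up by link and remove it from storage
        delete failureCounts[sourceURL];
    }

    function recordSuccess(sourceURL) {
        // A successful parse clears any pending removal.
        if (sourceURL in failureCounts) delete failureCounts[sourceURL];
    }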

View File

@@ -16,7 +16,7 @@ const accountsTable = process.env.DB_ACCOUNTS_TABLE;
const transactionsTable = process.env.DB_TRANSACTIONS_TABLE;
const pricingTable = process.env.DB_PRICING_TABLE;
var connection = mysql.createPool({
var Connection = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASS,
@@ -25,16 +25,41 @@ var connection = mysql.createPool({
});
// Helper Functions
// Function to run and handle SQL errors
function runSQL(sqlQuery, callback = (err, rows) => {
/**
* Function to run and handle SQL errors
* @param {string} sqlQuery The SQL query string
* @param {*} connection The SQL connection to be used to query
* @param {function} callback The callback function to be called with an error or the results
* @param {number} _retry Set by error retry, increments the number a query has been retried to increase wait time and track a specific query
*/
function runSQL(sqlQuery, connection, callback = (err, rows) => {
log.ERROR(err);
throw err;
}) {
// Start the MySQL Connection
}, _retry = 0) {
// Start the MySQL Connection
if (!connection) connection = Connection;
connection.query(sqlQuery, (err, rows) => {
if (err) {
log.ERROR("SQL Error:", err)
return callback(err, undefined);
if (err.code === "EHOSTUNREACH") {
// DB Connection is unavailable
let retryTimeout;
switch(_retry){
case 0:
retryTimeout = 30000;
break;
case retry < 15:
retryTimeout = 30000 + retry * 15000;
break;
default:
log.ERROR("Retried Database 15 times over, please check connection status and restart the app", sqlQuery, err);
return callback(err, undefined);
}
log.WARN(`Database connection is unavailable, waiting ${ retryTimeout / 1000 } seconds...`);
_retry += 1
// Wait for the retry timeout before trying the query again
setTimeout(runSQL(sqlQuery, connection, callback, _retry));
}
else return callback(err, undefined);
}
log.VERBOSE(`SQL result for query '${sqlQuery}':`, rows);
return callback(undefined, rows);
@@ -51,13 +76,14 @@ function returnMysqlTime(){
}
class Storage {
constructor(_dbTable) {
constructor(_dbTable, _connection) {
this.dbTable = _dbTable;
this.connection = _connection;
this.validKeys = [];
var sqlQuery = `SHOW COLUMNS FROM ${this.dbTable};`;
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return log.ERROR("Error getting column names: ", err);
if (rows){
for (const validKey of rows){
@@ -96,7 +122,7 @@ class Storage {
const sqlQuery = `SELECT * FROM ${this.dbTable} WHERE ${key} = "${keyValue}"`;
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (rows[0]?.[key]) return callback(undefined, rows[0]);
else return callback(undefined, false);
@@ -113,7 +139,7 @@ class Storage {
let records = [];
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
for (const row of rows) {
if (this.dbTable == rssFeedsTable){
@@ -139,7 +165,7 @@ class Storage {
let records = [];
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
for (const row of rows) {
if (this.dbTable == rssFeedsTable){
@@ -153,11 +179,21 @@ class Storage {
return callback(undefined, records);
});
}
closeConnection() {
try {
this.connection.end();
}
catch (err) {
log.ERROR("Error closing connection :", this.connection, err);
throw err;
}
}
}
exports.UserStorage = class UserStorage extends Storage {
constructor() {
super(accountsTable);
constructor(connection = undefined) {
super(accountsTable, connection);
}
/**
@@ -171,7 +207,7 @@ exports.UserStorage = class UserStorage extends Storage {
log.DEBUG(`Adding new entry with SQL query: '${sqlQuery}'`)
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (rows?.affectedRows > 0) return callback(undefined, rows);
return callback(undefined, undefined);
@@ -231,7 +267,7 @@ exports.UserStorage = class UserStorage extends Storage {
log.DEBUG("Updating Balance with SQL Query: ", sqlQuery);
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (!rows?.affectedRows > 0) return callback(new Error("Error updating Balance", rows), undefined);
return callback(undefined, rows);
@@ -240,8 +276,8 @@ exports.UserStorage = class UserStorage extends Storage {
}
exports.TransactionStorage = class TransactionStorage extends Storage {
constructor() {
super(transactionsTable);
constructor(connection = undefined) {
super(transactionsTable, connection);
}
createTransaction(transaction, callback){
@@ -249,7 +285,7 @@ exports.TransactionStorage = class TransactionStorage extends Storage {
log.DEBUG(`Adding new entry with SQL query: '${sqlQuery}'`)
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (rows?.affectedRows > 0) return callback(undefined, rows);
return callback(undefined, undefined);
@@ -258,8 +294,8 @@ exports.TransactionStorage = class TransactionStorage extends Storage {
}
exports.FeedStorage = class FeedStorage extends Storage {
constructor() {
super(rssFeedsTable);
constructor(connection = undefined) {
super(rssFeedsTable, connection);
}
/**
@@ -316,7 +352,7 @@ exports.FeedStorage = class FeedStorage extends Storage {
log.DEBUG(`Adding new entry with SQL query: '${sqlQuery}'`)
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows);
})
@@ -355,7 +391,7 @@ exports.FeedStorage = class FeedStorage extends Storage {
log.DEBUG(`Updating entry with SQL query: '${sqlQuery}'`)
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows);
})
@@ -373,7 +409,7 @@ exports.FeedStorage = class FeedStorage extends Storage {
const sqlQuery = `DELETE FROM ${this.dbTable} WHERE id = "${id}";`;
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows[0]);
})
@@ -439,8 +475,8 @@ exports.FeedStorage = class FeedStorage extends Storage {
}
exports.PostStorage = class PostStorage extends Storage {
constructor() {
super(rssPostsTable);
constructor(connection = undefined) {
super(rssPostsTable, connection);
}
savePost(_postObject, callback){
@@ -456,7 +492,7 @@ exports.PostStorage = class PostStorage extends Storage {
log.DEBUG(`Adding new post with SQL query: '${sqlQuery}'`)
runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows);
})
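A minimal sketch of the retry-on-unreachable idea from runSQL, assuming the mysql package's pool.query callback API; the backoff figures mirror the diff (30 s base, 15 s more per attempt, giving up after 15 tries), and the retry is scheduled by passing setTimeout a callback plus the delay:

    // Retry a query when the database host is unreachable, waiting longer each attempt.
    function queryWithRetry(pool, sqlQuery, callback, attempt = 0) {
        pool.query(sqlQuery, (err, rows) => {
            if (err && err.code === 'EHOSTUNREACH') {
                if (attempt >= 15) return callback(err, undefined); // give up after 15 tries
                const retryTimeout = 30000 + attempt * 15000;       // 30 s, 45 s, 60 s, ...
                return setTimeout(
                    () => queryWithRetry(pool, sqlQuery, callback, attempt + 1),
                    retryTimeout
                );
            }
            if (err) return callback(err, undefined); // any other error goes straight back
            return callback(undefined, rows);
        });
    }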

View File

@@ -3,8 +3,10 @@ const { DebugBuilder } = require("./utilities/debugBuilder");
const log = new DebugBuilder("server", "libUtils");
const { NodeHtmlMarkdown } = require('node-html-markdown');
const { parse } = require("node-html-parser");
const crypto = require("crypto");
const imageRegex = /(http(s?):)([/|.|\w|\s|-])*((\.(?:jpg|gif|png|webm))|(\/gallery\/(?:[/|.|\w|\s|-])*))/g;
const youtubeVideoRegex = /((?:https?:)?\/\/)?((?:www|m)\.)?((?:youtube(-nocookie)?\.com|youtu.be))(\/(?:[\w\-]+\?v=|embed\/|v\/)?)([\w\-]+)/g
exports.EmmeliaEmbedBuilder = class PostEmbedBuilder extends EmbedBuilder {
constructor() {
@@ -74,40 +76,65 @@ exports.onError = (error) => {
exports.sendPost = (post, source, channel, callback) => {
log.DEBUG("Sending post from source: ", post, source);
post.content = parse(post.content);
const postTitle = post.title;
const postTitle = String(post.title).substring(0, 150);
const postLink = post.link;
const postContent = NodeHtmlMarkdown.translate(post.content.text);
let postContent;
if (post.content) {
// Reset the content parameter with the encoded parameter
post.content = parse(post['content:encoded'] ?? post.content);
// Get the post content and trim it to length or add a placeholder if necessary
var postText = String(post.content.text);
if (postText.length >= 3800) postText = `${postText.slice(0, 3800).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))} [...](${post.link})`;
else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
postContent = postText;
}
else postContent = `*This post has no content* [Direct Link](${post.link})`;
// Check for embedded youtube videos and add the first four as links
const ytVideos = String(post.content).match(youtubeVideoRegex);
if (ytVideos) {
for (var ytVideo of ytVideos.slice(0,4)){
// If the video is an embed, replace the embed to make it watchable
if (ytVideo.includes("embed")) ytVideo = ytVideo.replace("embed/", "watch?v=");
postContent += `\nEmbedded Video from Post: [YouTube](${ytVideo})`
}
}
log.DEBUG("Post content: ", postContent);
const postId = post.postId;
const postPubDate = new Date(post.pubDate).toISOString() ?? new Date().toISOString();
if (!post.pubDate) post.pubDate = Date.now();
const postPubDate = new Date(post.pubDate).toISOString();
var postSourceLink = source.title;
var postImage = post.image ?? undefined;
if (!postImage){
const linksInPost = post.content.querySelectorAll("a");
if (linksInPost) {
log.DEBUG("Found links in post:", linksInPost);
for (const link of linksInPost) {
const images = String(link.getAttribute("href")).match(imageRegex);
log.DEBUG("Images found in post:", images);
if (images) {
postImage = images[0];
if (post.content){
const linksInPost = post.content.querySelectorAll("a");
if (linksInPost) {
log.DEBUG("Found links in post:", linksInPost);
for (const link of linksInPost) {
// Check to see if this link is a youtube video that was already found, if so skip it
if (ytVideos?.includes(link)) continue;
const images = String(link.getAttribute("href")).match(imageRegex);
log.DEBUG("Images found in post:", images);
if (images) {
postImage = images[0];
}
}
}
}
}
log.DEBUG("Sending an RSS post to discord", postTitle, postId)
log.DEBUG("Sending an RSS post to discord", postTitle, postId, postContent)
try{
const rssMessage = new this.EmmeliaEmbedBuilder()
.setColor(0x0099FF)
.setTitle(postTitle)
.setURL(postLink)
.addFields({ name: "Post Content", value: postContent.slice(0,1024), inline: false })
.addFields({ name: 'Published', value: postPubDate, inline: true })
.addFields({ name: 'Source', value: postSourceLink, inline: true });
.addFields({ name: 'Source', value: postSourceLink, inline: true })
.addFields({ name: 'Published', value: postPubDate, inline: true });
// TODO - If there is more than one image, create a canvas and post the created canvas
if (postImage) {
@@ -115,6 +142,10 @@ exports.sendPost = (post, source, channel, callback) => {
rssMessage.setImage(postImage);
}
//Add the main content if it's present
postContent = postContent.slice(0, 4090).trim();
if (postContent) rssMessage.setDescription( postContent );
channel.send({ embeds: [rssMessage] });
//throw new Error("YOU SHALL NOT PASS");
@@ -122,7 +153,23 @@ exports.sendPost = (post, source, channel, callback) => {
return callback(undefined, true);
}
catch (err){
log.ERROR("Error sending message: ", err);
log.ERROR("Error sending message: ", postTitle, postId, postContent, postPubDate, err);
return callback(err, undefined);
}
}
exports.returnHash = (...stringsIncluded) => {
return crypto.createHash('sha1').update(`${stringsIncluded.join("-<<??//\\\\??>>-")}`).digest("base64");
}
/**
* Check if a key exists in an array of objects
* @param {*} key The key to search for
* @param {*} array The object to search for the key
* @returns {boolean} If the key exists in the object
*/
exports.checkForKeyInArrayOfObjects = (key, array) => {
return array.filter(function (o) {
return o.hasOwnProperty(key);
}).length > 0;
}
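A small sketch of the word-boundary trimming used for the embed description, assuming Discord's 4096-character description limit and the 3800-character cutoff chosen in the diff; trimForEmbed is an illustrative helper, not a project function:

    // Trim post text at a word boundary and link back to the full post.
    function trimForEmbed(text, link, limit = 3800) {
        if (!text) return `*This post has no content* [Direct Link](${link})`;
        if (text.length < limit) return text;
        const cut = text.slice(0, limit);
        const lastSpace = cut.lastIndexOf(' ');
        return `${cut.slice(0, lastSpace > 0 ? lastSpace : limit)} [...](${link})`;
    }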

View File

@@ -8,25 +8,25 @@
"@discordjs/rest": "~1.5.0",
"axios": "~1.3.4",
"chatgpt": "~4.7.2",
"cookie-parser": "~1.4.4",
"debug": "~2.6.9",
"discord-api-types": "~0.37.35",
"discord.js": "~14.7.1",
"dotenv": "~16.0.3",
"ejs": "~2.6.1",
"express": "~4.18.2",
"fs": "~0.0.1-security",
"gpt-3-encoder": "~1.1.4",
"http-errors": "~1.6.3",
"jsdoc": "~3.6.7",
"jsonfile": "~6.1.0",
"morgan": "~1.9.1",
"mysql": "~2.18.1",
"node-html-markdown": "~1.3.0",
"node-html-parser": "~6.1.5",
"openai": "~3.1.0",
"parse-files": "~0.1.1",
"rss-parser": "~3.12.0",
"mysql": "~2.18.1",
"cookie-parser": "~1.4.4",
"debug": "~2.6.9",
"ejs": "~2.6.1",
"http-errors": "~1.6.3",
"morgan": "~1.9.1",
"node-html-markdown": "~1.3.0",
"node-html-parser": "~6.1.5",
"gpt-3-encoder": "~1.1.4",
"user-agents": "~1.0.1303"
},
"scripts": {

update.sh (new executable file, +15 lines)
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Starting message
echo "<!-- UPDATING ---!>"
# TODO - Add an updater for Stable Diffusion API
# Update the git Repo
git fetch -a -p
git pull
# Install any new libraries
npm i
# Update complete message
echo "<!--- UPDATE COMPLETE! ---!>"

View File

@@ -18,7 +18,7 @@ exports.DebugBuilder = class DebugBuilder {
this.ERROR = (...messageParts) => {
const error = debug(`${appName}:${fileName}:ERROR`);
error(messageParts);
if (process.env.EXIT_ON_ERROR) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
if (process.env.EXIT_ON_ERROR && process.env.EXIT_ON_ERROR > 0) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
}
}
}
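A one-liner sketch of the same guard with an explicit numeric conversion, so EXIT_ON_ERROR=0 (a truthy string when read from the environment) does not schedule an exit; the variable names follow the diff:

    // Only schedule an exit when EXIT_ON_ERROR is a positive number.
    const exitOnError = Number(process.env.EXIT_ON_ERROR ?? 0);
    if (exitOnError > 0) {
        setTimeout(() => process.exit(1), Number(process.env.EXIT_ON_ERROR_DELAY ?? 0));
    }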

View File

@@ -2,7 +2,7 @@ const mysql = require('mysql');
const databaseConfig = require('../config/databaseConfig');
const utils = require('./utils');
const connection = mysql.createConnection({
const connection = mysql.createPool({
host: databaseConfig.database_host,
user: databaseConfig.database_user,
password: databaseConfig.database_password,
@@ -11,8 +11,6 @@ const connection = mysql.createConnection({
const nodesTable = `${databaseConfig.database_database}.nodes`;
connection.connect()
/** Get all nodes the server knows about regardless of status
* @param {*} callback Callback function
*/