Compare commits
feature/#6 ... main (16 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | bf461bd5a9 |  |
|  | 64bee8c561 |  |
|  | b7147695ac |  |
|  | d7da3c636c |  |
|  | 68b5e5436c |  |
|  | 46ff0f49e8 |  |
|  | 8a417d9ed7 |  |
|  | 8ff82a462b |  |
|  | 1aea162b8f |  |
|  | edef8a4c7d |  |
|  | c8b90ca220 |  |
|  | ad09caac25 |  |
|  | 89d1cf3ee6 |  |
|  | 14693692d0 |  |
|  | a86e4b2876 |  |
|  | a7bcf971c4 |  |
.gitignore (vendored): 3 lines changed
@@ -3,5 +3,4 @@ node_modules/
package-lock.json
*.bak
*.log
*._.*
.generations/
*._.*
@@ -1,6 +1,6 @@
const { submitImagePromptTransaction, DALLE_COLOR } = require("../controllers/openAiController");
const { SlashCommandBuilder } = require('discord.js');
const { DebugBuilder } = require("../utilities/debugBuilder");
const { submitImagePromptTransaction, DALLE_COLOR } = require("../controllers/openAiController");
const log = new DebugBuilder("server", "imagine");
const { EmmeliaEmbedBuilder } = require('../libUtils');
@@ -1,33 +0,0 @@
const { SlashCommandBuilder } = require('discord.js');
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "stability");
const { submitImageGenerationTransaction, STABILITY_COLOR } = require("../controllers/stabilityController");
const { EmmeliaEmbedBuilder } = require('../libUtils');

module.exports = {
    data: new SlashCommandBuilder()
        .setName('stability')
        .setDescription('Replies with your input!'),
    /*
        .addStringOption(option =>
            option.setName('input')
                .setDescription('The input to echo back')
                .setRequired(false)
                .addChoices()),
    */
    example: "stability",
    isPrivileged: true,
    requiresTokens: false,
    defaultTokenUsage: 0,
    deferInitialReply: true,
    async execute(interaction) {
        try{
            await submitImageGenerationTransaction();
            await interaction.editReply('**Pong.**'); // TODO - Add insults as the response to this command
        }catch(err){
            log.ERROR(err)
            //await interaction.reply(err.toString());
        }
    }
};
@@ -14,8 +14,8 @@ exports.RSSController = class RSSController {
    }

    async start(){
        // Wait 30 seconds for the rest of the bot to start before starting rss feeds
        await new Promise(resolve => setTimeout(resolve, 30000));
        // Wait for the refresh period before starting rss feeds, so the rest of the bot can start
        await new Promise(resolve => setTimeout(resolve, refreshInterval));

        log.INFO("Starting RSS Controller");
        // Get initial feeds before the starting the infinite loop
@@ -1,71 +0,0 @@
// GENERATED CODE -- DO NOT EDIT!

'use strict';
var grpc = require('grpc');
var generation_pb = require('./generation_pb.js');
var google_protobuf_struct_pb = require('google-protobuf/google/protobuf/struct_pb.js');
var tensors_pb = require('./tensors_pb.js');

function serialize_gooseai_Answer(arg) {
  if (!(arg instanceof generation_pb.Answer)) {
    throw new Error('Expected argument of type gooseai.Answer');
  }
  return Buffer.from(arg.serializeBinary());
}

function deserialize_gooseai_Answer(buffer_arg) {
  return generation_pb.Answer.deserializeBinary(new Uint8Array(buffer_arg));
}

function serialize_gooseai_ChainRequest(arg) {
  if (!(arg instanceof generation_pb.ChainRequest)) {
    throw new Error('Expected argument of type gooseai.ChainRequest');
  }
  return Buffer.from(arg.serializeBinary());
}

function deserialize_gooseai_ChainRequest(buffer_arg) {
  return generation_pb.ChainRequest.deserializeBinary(new Uint8Array(buffer_arg));
}

function serialize_gooseai_Request(arg) {
  if (!(arg instanceof generation_pb.Request)) {
    throw new Error('Expected argument of type gooseai.Request');
  }
  return Buffer.from(arg.serializeBinary());
}

function deserialize_gooseai_Request(buffer_arg) {
  return generation_pb.Request.deserializeBinary(new Uint8Array(buffer_arg));
}


//
// gRPC services
//
var GenerationServiceService = exports.GenerationServiceService = {
  generate: {
    path: '/gooseai.GenerationService/Generate',
    requestStream: false,
    responseStream: true,
    requestType: generation_pb.Request,
    responseType: generation_pb.Answer,
    requestSerialize: serialize_gooseai_Request,
    requestDeserialize: deserialize_gooseai_Request,
    responseSerialize: serialize_gooseai_Answer,
    responseDeserialize: deserialize_gooseai_Answer,
  },
  chainGenerate: {
    path: '/gooseai.GenerationService/ChainGenerate',
    requestStream: false,
    responseStream: true,
    requestType: generation_pb.ChainRequest,
    responseType: generation_pb.Answer,
    requestSerialize: serialize_gooseai_ChainRequest,
    requestDeserialize: deserialize_gooseai_ChainRequest,
    responseSerialize: serialize_gooseai_Answer,
    responseDeserialize: deserialize_gooseai_Answer,
  },
};

exports.GenerationServiceClient = grpc.makeGenericClientConstructor(GenerationServiceService);
File diff suppressed because it is too large
@@ -1,117 +0,0 @@
// package: gooseai
// file: generation.proto

var generation_pb = require("./generation_pb");
var grpc = require("@improbable-eng/grpc-web").grpc;

var GenerationService = (function () {
  function GenerationService() {}
  GenerationService.serviceName = "gooseai.GenerationService";
  return GenerationService;
}());

GenerationService.Generate = {
  methodName: "Generate",
  service: GenerationService,
  requestStream: false,
  responseStream: true,
  requestType: generation_pb.Request,
  responseType: generation_pb.Answer
};

GenerationService.ChainGenerate = {
  methodName: "ChainGenerate",
  service: GenerationService,
  requestStream: false,
  responseStream: true,
  requestType: generation_pb.ChainRequest,
  responseType: generation_pb.Answer
};

exports.GenerationService = GenerationService;

function GenerationServiceClient(serviceHost, options) {
  this.serviceHost = serviceHost;
  this.options = options || {};
}

GenerationServiceClient.prototype.generate = function generate(requestMessage, metadata) {
  var listeners = {
    data: [],
    end: [],
    status: []
  };
  var client = grpc.invoke(GenerationService.Generate, {
    request: requestMessage,
    host: this.serviceHost,
    metadata: metadata,
    transport: this.options.transport,
    debug: this.options.debug,
    onMessage: function (responseMessage) {
      listeners.data.forEach(function (handler) {
        handler(responseMessage);
      });
    },
    onEnd: function (status, statusMessage, trailers) {
      listeners.status.forEach(function (handler) {
        handler({ code: status, details: statusMessage, metadata: trailers });
      });
      listeners.end.forEach(function (handler) {
        handler({ code: status, details: statusMessage, metadata: trailers });
      });
      listeners = null;
    }
  });
  return {
    on: function (type, handler) {
      listeners[type].push(handler);
      return this;
    },
    cancel: function () {
      listeners = null;
      client.close();
    }
  };
};

GenerationServiceClient.prototype.chainGenerate = function chainGenerate(requestMessage, metadata) {
  var listeners = {
    data: [],
    end: [],
    status: []
  };
  var client = grpc.invoke(GenerationService.ChainGenerate, {
    request: requestMessage,
    host: this.serviceHost,
    metadata: metadata,
    transport: this.options.transport,
    debug: this.options.debug,
    onMessage: function (responseMessage) {
      listeners.data.forEach(function (handler) {
        handler(responseMessage);
      });
    },
    onEnd: function (status, statusMessage, trailers) {
      listeners.status.forEach(function (handler) {
        handler({ code: status, details: statusMessage, metadata: trailers });
      });
      listeners.end.forEach(function (handler) {
        handler({ code: status, details: statusMessage, metadata: trailers });
      });
      listeners = null;
    }
  });
  return {
    on: function (type, handler) {
      listeners[type].push(handler);
      return this;
    },
    cancel: function () {
      listeners = null;
      client.close();
    }
  };
};

exports.GenerationServiceClient = GenerationServiceClient;
File diff suppressed because it is too large
@@ -1,60 +0,0 @@
// Debug
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "stabilityController");

// Modules
const Generation = require("./stabilityAi/generation_pb");
const { GenerationServiceClient } = require("./stabilityAi/generation_pb_service");

const { grpc } = require("@improbable-eng/grpc-web");
const GRPCWeb = grpc;

const { NodeHttpTransport } = require("@improbable-eng/grpc-web-node-http-transport");
const fs = require("fs");
const {
  buildGenerationRequest,
  executeGenerationRequest,
  onGenerationComplete,
} = require("../utilities/stabilityHelpers");

// Set Global Color for this controller
exports.STABILITY_COLOR = 0xeb34b7;

// This is a NodeJS-specific requirement - browsers implementations should omit this line.
GRPCWeb.setDefaultTransport(NodeHttpTransport());

// Authenticate using your API key, don't commit your key to a public repository!
const metadata = new GRPCWeb.Metadata();
metadata.set("Authorization", "Bearer " + process.env.STABILITY_API_KEY);

// Create a generation client to use with all future requests
const stabilityClient = new GenerationServiceClient("https://grpc.stability.ai", {});

exports.submitImageGenerationTransaction = async () => {
  const request = buildGenerationRequest("stable-diffusion-512-v2-1", {
    type: "text-to-image",
    prompts: [
      {
        text: "A dream of a distant galaxy, by Caspar David Friedrich, matte painting trending on artstation HQ",
      },
    ],
    width: 512,
    height: 512,
    samples: 1,
    cfgScale: 13,
    steps: 10,
    sampler: Generation.DiffusionSampler.SAMPLER_K_DPMPP_2M,
  });
  log.DEBUG("Stability request: ", request, metadata, stabilityClient);

  executeGenerationRequest(stabilityClient, request, metadata)
    .then((response) => {
      log.DEBUG("Stability Generation response: ". response)
      onGenerationComplete(response);
      return;
    })
    .catch((error) => {
      log.ERROR("Failed to make text-to-image request:", error);
    }
  );
}
libCore.js: 80 lines changed
@@ -25,6 +25,52 @@ let parser = new Parser({
var feedStorage = new FeedStorage();
var postStorage = new PostStorage();

// Initiate a running array of objects to keep track of sources that have no feeds/posts
/*
var runningPostsToRemove = [{
  "{SOURCE URL}": {NUMBER OF TIMES IT'S BEEN REMOVED}
}]
*/
var runningPostsToRemove = {};
const sourceFailureLimit = process.env.SOURCE_FAILURE_LIMIT ?? 3;

/**
 *
 * @param {*} sourceURL
 */
exports.removeSource = function removeSource(sourceURL) {
  log.INFO("Removing source URL: ", sourceURL);
  if (!sourceURL in runningPostsToRemove) {runningPostsToRemove[sourceURL] = 1; return;}

  if (runningPostsToRemove[sourceURL] < sourceFailureLimit) {runningPostsToRemove[sourceURL] += 1; return;}

  feedStorage.getRecordBy('link', sourceURL, (err, record) => {
    if (err) log.ERROR("Error getting record from feedStorage", err);

    if (!record) log.ERROR("No source returned from feedStorage");
    feedStorage.destroy(record.id, (err, results) => {
      if (err) log.ERROR("Error removing ID from results", err);

      if (!results) log.WARN("No results from remove entry");

      log.DEBUG("Source exceeded the limit of retries and has been removed", sourceURL);
      return;
    })
  })
}

/**
 * Unset a source URL from deletion if the source has not already been deleted
 * @param {*} sourceURL The source URL to be unset from deletion
 * @returns {*}
 */
exports.unsetRemoveSource = function unsetRemoveSource(sourceURL) {
  log.INFO("Unsetting source URL from deletion (if not already deleted): ", sourceURL);
  if (!sourceURL in runningPostsToRemove) return;

  if (runningPostsToRemove[sourceURL] > sourceFailureLimit) return delete runningPostsToRemove[sourceURL];
}

/**
 * Adds or updates new source url to configured storage
 * @constructor
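The hunk above introduces a per-source failure counter and only deletes a source from storage once it has failed more often than SOURCE_FAILURE_LIMIT, while a later successful fetch clears it. A minimal sketch of that pattern, written independently of the repository's storage layer (the parenthesized membership test is deliberate, since `in` binds tighter than `!` in JavaScript):

```js
// Sketch only: count consecutive failures per source URL and remove the source
// once the configured limit is exceeded; a success resets the counter.
const failures = {};
const sourceFailureLimit = Number(process.env.SOURCE_FAILURE_LIMIT ?? 3);

function recordFailure(sourceURL, removeFn) {
  if (!(sourceURL in failures)) { failures[sourceURL] = 1; return; }
  if (failures[sourceURL] < sourceFailureLimit) { failures[sourceURL] += 1; return; }
  removeFn(sourceURL); // limit exceeded: drop the source from storage
}

function recordSuccess(sourceURL) {
  delete failures[sourceURL]; // a good fetch clears the counter
}
```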
@@ -105,29 +151,31 @@ exports.updateFeeds = (client) => {
tempFeedStorage.getAllRecords(async (err, records) => {
// Load the posts from each RSS source
for (const source of records) {
sourcePromiseArray.push(new Promise((resolve, reject) => {
sourcePromiseArray.push(new Promise((resolve, reject) => {
log.DEBUG('Record title: ', source.title);
log.DEBUG('Record link: ', source.link);
log.DEBUG('Record category: ', source.category);
log.DEBUG('Record guild ID: ', source.guild_id);
log.DEBUG('Record channel ID: ', source.channel_id);
log.DEBUG('Record channel ID: ', source.channel_id);
// Parse the RSS feed
parser.parseURL(source.link, async (err, parsedFeed) => {
if (err) {
log.ERROR("Parser Error: ", source, err);
log.ERROR("Parser Error: ", runningPostsToRemove, source, err);
// Call the wrapper to make sure the site isn't just down at the time it checks and is back up the next time
this.removeSource(source.link);
reject;
}
try {
log.DEBUG("Parsed Feed Keys", Object.keys(parsedFeed), parsedFeed?.title);
if (parsedFeed?.items){
for (const post of parsedFeed.items){
try {
if (parsedFeed?.items){
this.unsetRemoveSource(source.link);
for (const post of parsedFeed.items.reverse()){
recordPromiseArray.push(new Promise((recordResolve, recordReject) => {
log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
log.VERBOSE("Post from feed: ", post);
if (!post.title || !post.link || !post.pubDate) return recordReject("Missing information from the post");
if (!post.title || !post.link) return recordReject("Missing information from the post");
if (!post.content || !post['content:encoded']) log.WARN("There is no content for post: ", post.title);

post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate);
post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate ?? Date.now());
tempPostStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
if (err) throw err;
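The postId fallback in this hunk prefers an identifier supplied by the feed and only hashes title, link, and pubDate (now defaulting to Date.now()) when none is present. A rough sketch of that chain, using a plain "-" separator rather than the repository's returnHash separator:

```js
const crypto = require("crypto");

// Sketch: derive a stable ID for a post, hashing only when the feed gives no ID.
function derivePostId(post) {
  const feedId = post.postId ?? post.guid ?? post.id;
  if (feedId) return feedId;
  return crypto.createHash("sha1")
    .update([post.title, post.link, post.pubDate ?? Date.now()].join("-"))
    .digest("base64");
}
```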
@@ -152,15 +200,19 @@ exports.updateFeeds = (client) => {
log.DEBUG("Saved results: ", saveResults);
return recordResolve("Saved results", saveResults);
}
});
})
})
});
})
})
}))
}
}
}
}
else {
this.removeSource(source.link);
}
}
catch (err) {
log.ERROR("Error Parsing Feed: ", source.link, err);
this.removeSource(source.link);
throw err;
}
Promise.all(recordPromiseArray).then((values) => {
@@ -25,17 +25,41 @@ var Connection = mysql.createPool({
});

// Helper Functions
// Function to run and handle SQL errors
/**
 * Function to run and handle SQL errors
 * @param {string} sqlQuery The SQL query string
 * @param {*} connection The SQL connection to be used to query
 * @param {function} callback The callback function to be called with an error or the results
 * @param {number} _retry Set by error retry, increments the number a query has been retried to increase wait time and track a specific query
 */
function runSQL(sqlQuery, connection, callback = (err, rows) => {
  log.ERROR(err);
  throw err;
}) {
}, _retry = 0) {
  // Start the MySQL Connection
  if (!connection) connection = Connection;
  connection.query(sqlQuery, (err, rows) => {
    if (err) {
      log.ERROR("SQL Error on query:", sqlQuery, err);
      return callback(err, undefined);
      if (err.code === "EHOSTUNREACH") {
        // DB Connection is unavailable
        let retryTimeout;
        switch(_retry){
          case 0:
            retryTimeout = 30000;
            break;
          case retry < 15:
            retryTimeout = 30000 + retry * 15000;
            break;
          default:
            log.ERROR("Retried Database 15 times over, please check connection status and restart the app", sqlQuery, err);
            return callback(err, undefined);
        }
        log.WARN(`Database connection is unavailable, waiting ${ retryTimeout / 1000 } seconds...`);
        _retry += 1
        // Wait for the retry timeout before trying the query again
        setTimeout(runSQL(sqlQuery, connection, callback, _retry));
      }
      else return callback(err, undefined);
    }
    log.VERBOSE(`SQL result for query '${sqlQuery}':`, rows);
    return callback(undefined, rows);
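The retry branch above keys off err.code === "EHOSTUNREACH", grows the wait by 15 seconds per attempt, and gives up after 15 attempts. A hedged sketch of that back-off idea, written with the delay passed explicitly to setTimeout (setTimeout expects a function plus a millisecond delay):

```js
// Sketch only: retry a query while the database host is unreachable, waiting
// longer on every attempt and giving up after 15 retries.
function queryWithRetry(pool, sqlQuery, callback, attempt = 0) {
  pool.query(sqlQuery, (err, rows) => {
    if (!err) return callback(undefined, rows);
    if (err.code !== "EHOSTUNREACH") return callback(err, undefined);
    if (attempt >= 15) return callback(err, undefined); // give up, surface the error
    const retryTimeout = 30000 + attempt * 15000; // 30s, 45s, 60s, ...
    setTimeout(() => queryWithRetry(pool, sqlQuery, callback, attempt + 1), retryTimeout);
  });
}
```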
libUtils.js: 81 lines changed
@@ -75,20 +75,26 @@ exports.onError = (error) => {
}

exports.sendPost = (post, source, channel, callback) => {
log.DEBUG("Sending post from source: ", post, source);
// Reset the content parameter with the encoded parameter
post.content = parse(post['content:encoded'] ?? post.content);
const postTitle = post.title;
log.DEBUG("Sending post from source: ", post, source);
const postTitle = String(post.title).substring(0, 150);
const postLink = post.link;
// Get the post content and trim it to length or add a placeholder if necessary
var postText = String(post.content.text);
if (postText.length >= 300) postText = `${postText.slice(0, 300).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))}...`;
else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
var postContent = postText;
let postContent;

if (post.content) {
// Reset the content parameter with the encoded parameter
post.content = parse(post['content:encoded'] ?? post.content);
// Get the post content and trim it to length or add a placeholder if necessary
var postText = String(post.content.text);
if (postText.length >= 3800) postText = `${postText.slice(0, 3800).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))} [...](${post.link})`;
else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
postContent = postText;
}
else postContent = `*This post has no content* [Direct Link](${post.link})`;

// Check for embedded youtube videos and add the first four as links
const ytVideos = String(post.content).match(youtubeVideoRegex);
if (ytVideos) {
for (const ytVideo of ytVideos.slice(0,4)){
for (var ytVideo of ytVideos.slice(0,4)){
// If the video is an embed, replace the embed to make it watchable
if (ytVideo.includes("embed")) ytVideo = ytVideo.replace("embed/", "watch?v=");
postContent += `\nEmbeded Video from Post: [YouTube](${ytVideo})`
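The trimming logic above caps the post text (now at 3800 characters), cuts back to a space so a word is not split, and appends a link to the full post when there is nothing to show. A small sketch of that intent as a standalone helper:

```js
// Sketch: cap text at `max` characters, break on the last space inside the cap,
// and fall back to a placeholder when there is no content at all.
function trimPostText(text, max, link) {
  if (!text || text.length === 0) return `*This post has no content* [Direct Link](${link})`;
  if (text.length < max) return text;
  const cut = text.slice(0, max);
  return `${cut.slice(0, cut.lastIndexOf(" "))} [...](${link})`;
}
```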
@@ -97,23 +103,27 @@ exports.sendPost = (post, source, channel, callback) => {
log.DEBUG("Post content: ", postContent);

const postId = post.postId;
const postPubDate = new Date(post.pubDate).toISOString() ?? new Date().toISOString();
if (!post.pubDate) post.pubDate = Date.now();
const postPubDate = new Date(post.pubDate).toISOString();

var postSourceLink = source.title;
var postImage = post.image ?? undefined;

if (!postImage){
const linksInPost = post.content.querySelectorAll("a");
if (linksInPost) {
log.DEBUG("Found links in post:", linksInPost);
for (const link of linksInPost) {
// Check to see if this link is a youtube video that was already found, if so skip it
if (ytVideos?.includes(link)) continue;
const images = String(link.getAttribute("href")).match(imageRegex);
log.DEBUG("Images found in post:", images);
if (images) {
postImage = images[0];
}
}
if (post.content){
const linksInPost = post.content.querySelectorAll("a");
if (linksInPost) {
log.DEBUG("Found links in post:", linksInPost);
for (const link of linksInPost) {
// Check to see if this link is a youtube video that was already found, if so skip it
if (ytVideos?.includes(link)) continue;
const images = String(link.getAttribute("href")).match(imageRegex);
log.DEBUG("Images found in post:", images);
if (images) {
postImage = images[0];
}
}
}
}
}
@@ -123,15 +133,18 @@ exports.sendPost = (post, source, channel, callback) => {
.setColor(0x0099FF)
.setTitle(postTitle)
.setURL(postLink)
.addFields({ name: "Post Content", value: postContent, inline: false })
.addFields({ name: 'Published', value: postPubDate, inline: true })
.addFields({ name: 'Source', value: postSourceLink, inline: true });
.addFields({ name: 'Source', value: postSourceLink, inline: true })
.addFields({ name: 'Published', value: postPubDate, inline: true });

// TODO - If there is more than one image, create a canvas and post the created canvas
if (postImage) {
log.DEBUG("Image from post:", postImage);
rssMessage.setImage(postImage);
}

//Add the main content if it's present
postContent = postContent.slice(0, 4090).trim();
if (postContent) rssMessage.setDescription( postContent );

channel.send({ embeds: [rssMessage] });
@@ -140,11 +153,23 @@ exports.sendPost = (post, source, channel, callback) => {
return callback(undefined, true);
}
catch (err){
log.ERROR("Error sending message: ", err);
log.ERROR("Error sending message: ", postTitle, postId, postContent, postPubDate, err);
return callback(err, undefined);
}
}

exports.returnHash = (...stringsIncluded) => {
return crypto.createHash('sha1').update(`${stringsIncluded.join("-<<??//\\\\??>>-")}`).digest("base64");
}
}

/**
 * Check if a key exists in an array of objects
 * @param {*} key The key to search for
 * @param {*} array The object to search for the key
 * @returns {boolean} If the key exists in the object
 */
exports.checkForKeyInArrayOfObjects = (key, array) => {
return array.filter(function (o) {
return o.hasOwnProperty(key);
}).length > 0;
}
@@ -27,12 +27,7 @@
  "openai": "~3.1.0",
  "parse-files": "~0.1.1",
  "rss-parser": "~3.12.0",
  "user-agents": "~1.0.1303",
  "@improbable-eng/grpc-web": "~0.15.0",
  "@improbable-eng/grpc-web-node-http-transport": "~0.15.0",
  "google-protobuf": "~3.21.2",
  "grpc": "~1.24.11",
  "typescript": "~4.9.5"
  "user-agents": "~1.0.1303"
},
"scripts": {
  "test": "echo \"Error: no test specified\" && exit 1",
update.sh (new executable file): 15 lines
@@ -0,0 +1,15 @@
#!/bin/bash
# Stating Message
echo "<!-- UPDATING ---!>"

# TODO - Add an updater for Stable Diffusion API

# Update the git Repo
git fetch -a -p
git pull

# Install any new libraries
npm i

# Update complete message
echo "<!--- UPDATE COMPLETE! ---!>"
@@ -18,7 +18,7 @@ exports.DebugBuilder = class DebugBuilder {
this.ERROR = (...messageParts) => {
  const error = debug(`${appName}:${fileName}:ERROR`);
  error(messageParts);
  if (process.env.EXIT_ON_ERROR) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
  if (process.env.EXIT_ON_ERROR && process.env.EXIT_ON_ERROR > 0) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
}
}
}
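The new guard relies on process.env.EXIT_ON_ERROR (a string, like every environment variable) coercing to a number for the > 0 comparison. A sketch that parses the values up front, which keeps the same intent explicit:

```js
// Sketch: parse the exit-related environment variables once, then guard on numbers.
const exitOnError = Number(process.env.EXIT_ON_ERROR ?? 0);
const exitOnErrorDelay = Number(process.env.EXIT_ON_ERROR_DELAY ?? 0);

if (exitOnError > 0) setTimeout(() => process.exit(), exitOnErrorDelay);
```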
@@ -2,7 +2,7 @@ const mysql = require('mysql');
const databaseConfig = require('../config/databaseConfig');
const utils = require('./utils');

const connection = mysql.createConnection({
const connection = mysql.createPool({
  host: databaseConfig.database_host,
  user: databaseConfig.database_user,
  password: databaseConfig.database_password,

@@ -11,8 +11,6 @@ const connection = mysql.createConnection({

const nodesTable = `${databaseConfig.database_database}.nodes`;

connection.connect()

/** Get all nodes the server knows about regardless of status
 * @param {*} callback Callback function
 */
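This change swaps mysql.createConnection for mysql.createPool and drops the connection.connect() call; with the mysql package a pool manages its own connections, and pool.query() checks one out and releases it per query. A minimal sketch of that usage (the configuration values here are placeholders, not the repository's config module):

```js
const mysql = require('mysql');

// Sketch: a pool needs no explicit connect(); query() borrows and returns a connection.
const pool = mysql.createPool({
  host: process.env.DB_HOST,         // placeholder configuration values
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
});

pool.query('SELECT 1 AS ok', (err, rows) => {
  if (err) return console.error(err);
  console.log(rows[0].ok); // 1
});
```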
@@ -1,263 +0,0 @@
// Debug
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "stabilityController");

// Modules
const Generation = require("../controllers/stabilityAi/generation_pb");
const { GenerationServiceClient } = require("../controllers/stabilityAi/generation_pb_service");
const fs = require("fs");

const { grpc } = require("@improbable-eng/grpc-web");
const GRPCWeb = grpc;

const path = require("node:path");

function isImageArtifact(artifact) {
  log.DEBUG("Checking if image is an artifact", artifact);
  if (artifact.getType() !== Generation.ArtifactType.ARTIFACT_IMAGE) return false;
  if (artifact.getFinishReason() !== Generation.FinishReason.NULL) return false;
  return artifact.hasBinary();
}
exports.isImageArtifact = isImageArtifact;

function isNSFWFilteredArtifact(artifact) {
  if (artifact.getType() !== Generation.ArtifactType.ARTIFACT_IMAGE) return false;
  if (artifact.getFinishReason() !== Generation.FinishReason.FILTER) return false;
  return true;
}
exports.isNSFWFilteredArtifact = isNSFWFilteredArtifact;

/** Builds a generation request for a specified engine with the specified parameters. */
function buildGenerationRequest(engineID, params) {
  if (params.type === "upscaling") {
    const request = new Generation.Request()
    request.setEngineId(engineID)
    request.setRequestedType(Generation.ArtifactType.ARTIFACT_IMAGE)
    request.setClassifier(new Generation.ClassifierParameters())

    const imageParams = new Generation.ImageParameters()
    if ("width" in params && !!params.width) {
      imageParams.setWidth(params.width)
    } else if ("height" in params && !!params.height) {
      imageParams.setHeight(params.height)
    }
    request.setImage(imageParams)
    request.addPrompt(createInitImagePrompt(params.initImage))

    return request
  }

  const imageParams = new Generation.ImageParameters()
  if (params.type === "text-to-image") {
    params.width && imageParams.setWidth(params.width)
    params.height && imageParams.setHeight(params.height)
  }

  // Set the number of images to generate (Default 1)
  params.samples && imageParams.setSamples(params.samples)

  // Set the steps (Default 30)
  // Represents the amount of inference steps performed on image generation.
  params.steps && imageParams.setSteps(params.steps)

  // Set the seed (Default 0)
  // Including a seed will cause the results to be deterministic.
  // Omitting the seed or setting it to `0` will do the opposite.
  params.seed && imageParams.addSeed(params.seed)

  // Set the sampler (Default 'automatic')
  // Omitting this value enables 'automatic' mode where we choose the best sampler for you based
  // on the current payload. For example, since CLIP guidance only works on ancestral samplers,
  // when CLIP guidance is enabled, we will automatically choose an ancestral sampler for you.
  if (params.sampler) {
    const transformType = new Generation.TransformType()
    transformType.setDiffusion(params.sampler)
    imageParams.setTransform(transformType)
  }

  // Set the Engine
  // At the time of writing, valid engines are:
  // stable-diffusion-v1,
  // stable-diffusion-v1-5
  // stable-diffusion-512-v2-0
  // stable-diffusion-768-v2-0
  // stable-diffusion-512-v2-1
  // stable-diffusion-768-v2-1
  // stable-inpainting-v1-0
  // stable-inpainting-512-v2-0
  // esrgan-v1-x2plus
  const request = new Generation.Request()
  request.setEngineId(engineID)
  request.setRequestedType(Generation.ArtifactType.ARTIFACT_IMAGE)
  request.setClassifier(new Generation.ClassifierParameters())

  // Set the CFG scale (Default 7)
  // Influences how strongly your generation is guided to match your prompt. Higher values match closer.
  const samplerParams = new Generation.SamplerParameters()
  params.cfgScale && samplerParams.setCfgScale(params.cfgScale)

  const stepParams = new Generation.StepParameter()
  stepParams.setScaledStep(0)
  stepParams.setSampler(samplerParams)

  const scheduleParams = new Generation.ScheduleParameters()
  if (params.type === "image-to-image") {
    // If we're doing image-to-image generation then we need to configure
    // how much influence the initial image has on the diffusion process
    scheduleParams.setStart(params.stepScheduleStart)
    if (params.stepScheduleEnd) {
      scheduleParams.setEnd(params.stepScheduleEnd)
    }
  } else if (params.type === "image-to-image-masking") {
    // Step schedule start is always 1 for masking requests
    scheduleParams.setStart(1)
  }

  stepParams.setSchedule(scheduleParams)

  // Set CLIP Guidance (Default: None)
  // NOTE: This only works with ancestral samplers. Omitting the sampler parameter above will ensure
  // that we automatically choose an ancestral sampler for you when CLIP guidance is enabled.
  if (params.clipGuidancePreset) {
    const guidanceParameters = new Generation.GuidanceParameters()
    guidanceParameters.setGuidancePreset(params.clipGuidancePreset)
    stepParams.setGuidance(guidanceParameters)
  }

  imageParams.addParameters(stepParams)
  request.setImage(imageParams)

  params.prompts.forEach(textPrompt => {
    const prompt = new Generation.Prompt()
    prompt.setText(textPrompt.text)

    // If provided, set the prompt's weight (use negative values for negative weighting)
    if (textPrompt.weight) {
      const promptParameters = new Generation.PromptParameters()
      promptParameters.setWeight(textPrompt.weight)
      prompt.setParameters(promptParameters)
    }

    request.addPrompt(prompt)
  })

  // Add image prompts if we're doing some kind of image-to-image generation or upscaling
  if (params.type === "image-to-image") {
    request.addPrompt(createInitImagePrompt(params.initImage))
  } else if (params.type === "image-to-image-masking") {
    request.addPrompt(createInitImagePrompt(params.initImage))
    request.addPrompt(createMaskImagePrompt(params.maskImage))
  }

  return request
}
exports.buildGenerationRequest = buildGenerationRequest;

function createInitImagePrompt(imageBinary) {
  const initImageArtifact = new Generation.Artifact()
  initImageArtifact.setBinary(imageBinary)
  initImageArtifact.setType(Generation.ArtifactType.ARTIFACT_IMAGE)

  const initImageParameters = new Generation.PromptParameters()
  initImageParameters.setInit(true)

  const initImagePrompt = new Generation.Prompt()
  initImagePrompt.setParameters(initImageParameters)
  initImagePrompt.setArtifact(initImageArtifact)

  return initImagePrompt
}

function createMaskImagePrompt(imageBinary) {
  const maskImageArtifact = new Generation.Artifact()
  maskImageArtifact.setBinary(imageBinary)
  maskImageArtifact.setType(Generation.ArtifactType.ARTIFACT_MASK)

  const maskImagePrompt = new Generation.Prompt()
  maskImagePrompt.setArtifact(maskImageArtifact)

  return maskImagePrompt
}

/** Executes a GenerationRequest, abstracting the gRPC streaming result behind a Promise */
async function executeGenerationRequest(
  generationClient,
  request,
  metadata
) {
  try {
    const stream = generationClient.generate(request, metadata)
    const answers = await new Promise((resolve, reject) => {
      const answers = new Array()

      stream.on("data", data => answers.push(data))
      stream.on("end", () => resolve(answers))
      stream.on("status", status => {
        if (status.code === 0) return
        reject(status.details)
      })
    })

    return extractArtifacts(answers)
  } catch (err) {
    return err instanceof Error ? err : new Error(JSON.stringify(err))
  }
}
exports.executeGenerationRequest = executeGenerationRequest;

function extractArtifacts(answers) {
  const imageArtifacts = new Array()
  const filteredArtifacts = new Array()

  for (const answer of answers) {
    for (const artifact of answer.getArtifactsList()) {
      if (isImageArtifact(artifact)) {
        imageArtifacts.push(artifact)
      } else if (isNSFWFilteredArtifact(artifact)) {
        filteredArtifacts.push(artifact)
      }
    }
  }

  return { filteredArtifacts, imageArtifacts }
}

/** Generation completion handler - replace this with your own logic */
function onGenerationComplete(response) {
  if (response instanceof Error) {
    log.ERROR("Generation failed", response)
    throw response
  }

  log.DEBUG(
    `${response.imageArtifacts.length} image${
      response.imageArtifacts.length > 1 ? "s" : ""
    } were successfully generated.`
  )

  // Do something with NSFW filtered artifacts
  if (response.filteredArtifacts.length > 0) {
    log.DEBUG(
      `${response.filteredArtifacts.length} artifact` +
        `${response.filteredArtifacts.length > 1 ? "s" : ""}` +
        ` were filtered by the NSFW classifier and need to be retried.`
    )
  }

  // Do something with the successful image artifacts
  response.imageArtifacts.forEach(artifact => {
    try {
      const writePath =
      fs.writeFileSync(
        path.resolve(__dirname, `../.generations/image-${artifact.getSeed()}.png`),
        Buffer.from(artifact.getBinary_asU8())
      )
    } catch (error) {
      log.ERROR("Failed to write resulting image to disk", error)
    }
  })

  // For browser implementations: you could use the `artifact.getBinary_asB64()` method to get a
  // base64 encoded string and then create a data URL from that and display it in an <img> tag.
}
exports.onGenerationComplete = onGenerationComplete;
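The deleted executeGenerationRequest helper above wrapped the streaming gRPC response in a Promise: collect every "data" message, resolve on "end", and reject when the final "status" code is non-zero. A generic sketch of that pattern, independent of the Stability SDK:

```js
// Sketch: turn an event-style response stream into a Promise of collected messages.
function collectStream(stream) {
  return new Promise((resolve, reject) => {
    const answers = [];
    stream.on("data", (message) => answers.push(message));
    stream.on("end", () => resolve(answers));
    stream.on("status", (status) => {
      if (status.code !== 0) reject(status.details); // non-zero status means failure
    });
  });
}
```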