Compare commits
4 Commits
main
...
e874e3c785
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e874e3c785 | ||
|
|
53256cdc42 | ||
|
|
3c05395ddf | ||
| 20856734de |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -4,3 +4,4 @@ package-lock.json
|
|||||||
*.bak
|
*.bak
|
||||||
*.log
|
*.log
|
||||||
*._.*
|
*._.*
|
||||||
|
.generations/
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
const { submitImagePromptTransaction, DALLE_COLOR } = require("../controllers/openAiController");
|
|
||||||
const { SlashCommandBuilder } = require('discord.js');
|
const { SlashCommandBuilder } = require('discord.js');
|
||||||
const { DebugBuilder } = require("../utilities/debugBuilder");
|
const { DebugBuilder } = require("../utilities/debugBuilder");
|
||||||
|
const { submitImagePromptTransaction, DALLE_COLOR } = require("../controllers/openAiController");
|
||||||
const log = new DebugBuilder("server", "imagine");
|
const log = new DebugBuilder("server", "imagine");
|
||||||
const { EmmeliaEmbedBuilder } = require('../libUtils');
|
const { EmmeliaEmbedBuilder } = require('../libUtils');
|
||||||
|
|
||||||
|
|||||||
33
commands/stability.js
Normal file
33
commands/stability.js
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
const { SlashCommandBuilder } = require('discord.js');
|
||||||
|
const { DebugBuilder } = require("../utilities/debugBuilder");
|
||||||
|
const log = new DebugBuilder("server", "stability");
|
||||||
|
const { submitImageGenerationTransaction, STABILITY_COLOR } = require("../controllers/stabilityController");
|
||||||
|
const { EmmeliaEmbedBuilder } = require('../libUtils');
|
||||||
|
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
data: new SlashCommandBuilder()
|
||||||
|
.setName('stability')
|
||||||
|
.setDescription('Replies with your input!'),
|
||||||
|
/*
|
||||||
|
.addStringOption(option =>
|
||||||
|
option.setName('input')
|
||||||
|
.setDescription('The input to echo back')
|
||||||
|
.setRequired(false)
|
||||||
|
.addChoices()),
|
||||||
|
*/
|
||||||
|
example: "stability",
|
||||||
|
isPrivileged: true,
|
||||||
|
requiresTokens: false,
|
||||||
|
defaultTokenUsage: 0,
|
||||||
|
deferInitialReply: true,
|
||||||
|
async execute(interaction) {
|
||||||
|
try{
|
||||||
|
await submitImageGenerationTransaction();
|
||||||
|
await interaction.editReply('**Pong.**'); // TODO - Add insults as the response to this command
|
||||||
|
}catch(err){
|
||||||
|
log.ERROR(err)
|
||||||
|
//await interaction.reply(err.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
@@ -14,8 +14,8 @@ exports.RSSController = class RSSController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async start(){
|
async start(){
|
||||||
// Wait for the refresh period before starting rss feeds, so the rest of the bot can start
|
// Wait 30 seconds for the rest of the bot to start before starting rss feeds
|
||||||
await new Promise(resolve => setTimeout(resolve, refreshInterval));
|
await new Promise(resolve => setTimeout(resolve, 30000));
|
||||||
|
|
||||||
log.INFO("Starting RSS Controller");
|
log.INFO("Starting RSS Controller");
|
||||||
// Get initial feeds before the starting the infinite loop
|
// Get initial feeds before the starting the infinite loop
|
||||||
|
|||||||
71
controllers/stabilityAi/generation_grpc_pb.js
Normal file
71
controllers/stabilityAi/generation_grpc_pb.js
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
// GENERATED CODE -- DO NOT EDIT!
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
var grpc = require('grpc');
|
||||||
|
var generation_pb = require('./generation_pb.js');
|
||||||
|
var google_protobuf_struct_pb = require('google-protobuf/google/protobuf/struct_pb.js');
|
||||||
|
var tensors_pb = require('./tensors_pb.js');
|
||||||
|
|
||||||
|
function serialize_gooseai_Answer(arg) {
|
||||||
|
if (!(arg instanceof generation_pb.Answer)) {
|
||||||
|
throw new Error('Expected argument of type gooseai.Answer');
|
||||||
|
}
|
||||||
|
return Buffer.from(arg.serializeBinary());
|
||||||
|
}
|
||||||
|
|
||||||
|
function deserialize_gooseai_Answer(buffer_arg) {
|
||||||
|
return generation_pb.Answer.deserializeBinary(new Uint8Array(buffer_arg));
|
||||||
|
}
|
||||||
|
|
||||||
|
function serialize_gooseai_ChainRequest(arg) {
|
||||||
|
if (!(arg instanceof generation_pb.ChainRequest)) {
|
||||||
|
throw new Error('Expected argument of type gooseai.ChainRequest');
|
||||||
|
}
|
||||||
|
return Buffer.from(arg.serializeBinary());
|
||||||
|
}
|
||||||
|
|
||||||
|
function deserialize_gooseai_ChainRequest(buffer_arg) {
|
||||||
|
return generation_pb.ChainRequest.deserializeBinary(new Uint8Array(buffer_arg));
|
||||||
|
}
|
||||||
|
|
||||||
|
function serialize_gooseai_Request(arg) {
|
||||||
|
if (!(arg instanceof generation_pb.Request)) {
|
||||||
|
throw new Error('Expected argument of type gooseai.Request');
|
||||||
|
}
|
||||||
|
return Buffer.from(arg.serializeBinary());
|
||||||
|
}
|
||||||
|
|
||||||
|
function deserialize_gooseai_Request(buffer_arg) {
|
||||||
|
return generation_pb.Request.deserializeBinary(new Uint8Array(buffer_arg));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//
|
||||||
|
// gRPC services
|
||||||
|
//
|
||||||
|
var GenerationServiceService = exports.GenerationServiceService = {
|
||||||
|
generate: {
|
||||||
|
path: '/gooseai.GenerationService/Generate',
|
||||||
|
requestStream: false,
|
||||||
|
responseStream: true,
|
||||||
|
requestType: generation_pb.Request,
|
||||||
|
responseType: generation_pb.Answer,
|
||||||
|
requestSerialize: serialize_gooseai_Request,
|
||||||
|
requestDeserialize: deserialize_gooseai_Request,
|
||||||
|
responseSerialize: serialize_gooseai_Answer,
|
||||||
|
responseDeserialize: deserialize_gooseai_Answer,
|
||||||
|
},
|
||||||
|
chainGenerate: {
|
||||||
|
path: '/gooseai.GenerationService/ChainGenerate',
|
||||||
|
requestStream: false,
|
||||||
|
responseStream: true,
|
||||||
|
requestType: generation_pb.ChainRequest,
|
||||||
|
responseType: generation_pb.Answer,
|
||||||
|
requestSerialize: serialize_gooseai_ChainRequest,
|
||||||
|
requestDeserialize: deserialize_gooseai_ChainRequest,
|
||||||
|
responseSerialize: serialize_gooseai_Answer,
|
||||||
|
responseDeserialize: deserialize_gooseai_Answer,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.GenerationServiceClient = grpc.makeGenericClientConstructor(GenerationServiceService);
|
||||||
11988
controllers/stabilityAi/generation_pb.js
Normal file
11988
controllers/stabilityAi/generation_pb.js
Normal file
File diff suppressed because it is too large
Load Diff
117
controllers/stabilityAi/generation_pb_service.js
Normal file
117
controllers/stabilityAi/generation_pb_service.js
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
// package: gooseai
|
||||||
|
// file: generation.proto
|
||||||
|
|
||||||
|
var generation_pb = require("./generation_pb");
|
||||||
|
var grpc = require("@improbable-eng/grpc-web").grpc;
|
||||||
|
|
||||||
|
var GenerationService = (function () {
|
||||||
|
function GenerationService() {}
|
||||||
|
GenerationService.serviceName = "gooseai.GenerationService";
|
||||||
|
return GenerationService;
|
||||||
|
}());
|
||||||
|
|
||||||
|
GenerationService.Generate = {
|
||||||
|
methodName: "Generate",
|
||||||
|
service: GenerationService,
|
||||||
|
requestStream: false,
|
||||||
|
responseStream: true,
|
||||||
|
requestType: generation_pb.Request,
|
||||||
|
responseType: generation_pb.Answer
|
||||||
|
};
|
||||||
|
|
||||||
|
GenerationService.ChainGenerate = {
|
||||||
|
methodName: "ChainGenerate",
|
||||||
|
service: GenerationService,
|
||||||
|
requestStream: false,
|
||||||
|
responseStream: true,
|
||||||
|
requestType: generation_pb.ChainRequest,
|
||||||
|
responseType: generation_pb.Answer
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.GenerationService = GenerationService;
|
||||||
|
|
||||||
|
function GenerationServiceClient(serviceHost, options) {
|
||||||
|
this.serviceHost = serviceHost;
|
||||||
|
this.options = options || {};
|
||||||
|
}
|
||||||
|
|
||||||
|
GenerationServiceClient.prototype.generate = function generate(requestMessage, metadata) {
|
||||||
|
var listeners = {
|
||||||
|
data: [],
|
||||||
|
end: [],
|
||||||
|
status: []
|
||||||
|
};
|
||||||
|
var client = grpc.invoke(GenerationService.Generate, {
|
||||||
|
request: requestMessage,
|
||||||
|
host: this.serviceHost,
|
||||||
|
metadata: metadata,
|
||||||
|
transport: this.options.transport,
|
||||||
|
debug: this.options.debug,
|
||||||
|
onMessage: function (responseMessage) {
|
||||||
|
listeners.data.forEach(function (handler) {
|
||||||
|
handler(responseMessage);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
onEnd: function (status, statusMessage, trailers) {
|
||||||
|
listeners.status.forEach(function (handler) {
|
||||||
|
handler({ code: status, details: statusMessage, metadata: trailers });
|
||||||
|
});
|
||||||
|
listeners.end.forEach(function (handler) {
|
||||||
|
handler({ code: status, details: statusMessage, metadata: trailers });
|
||||||
|
});
|
||||||
|
listeners = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
on: function (type, handler) {
|
||||||
|
listeners[type].push(handler);
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
cancel: function () {
|
||||||
|
listeners = null;
|
||||||
|
client.close();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
GenerationServiceClient.prototype.chainGenerate = function chainGenerate(requestMessage, metadata) {
|
||||||
|
var listeners = {
|
||||||
|
data: [],
|
||||||
|
end: [],
|
||||||
|
status: []
|
||||||
|
};
|
||||||
|
var client = grpc.invoke(GenerationService.ChainGenerate, {
|
||||||
|
request: requestMessage,
|
||||||
|
host: this.serviceHost,
|
||||||
|
metadata: metadata,
|
||||||
|
transport: this.options.transport,
|
||||||
|
debug: this.options.debug,
|
||||||
|
onMessage: function (responseMessage) {
|
||||||
|
listeners.data.forEach(function (handler) {
|
||||||
|
handler(responseMessage);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
onEnd: function (status, statusMessage, trailers) {
|
||||||
|
listeners.status.forEach(function (handler) {
|
||||||
|
handler({ code: status, details: statusMessage, metadata: trailers });
|
||||||
|
});
|
||||||
|
listeners.end.forEach(function (handler) {
|
||||||
|
handler({ code: status, details: statusMessage, metadata: trailers });
|
||||||
|
});
|
||||||
|
listeners = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
on: function (type, handler) {
|
||||||
|
listeners[type].push(handler);
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
cancel: function () {
|
||||||
|
listeners = null;
|
||||||
|
client.close();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.GenerationServiceClient = GenerationServiceClient;
|
||||||
|
|
||||||
1108
controllers/stabilityAi/tensors_pb.js
Normal file
1108
controllers/stabilityAi/tensors_pb.js
Normal file
File diff suppressed because it is too large
Load Diff
60
controllers/stabilityController.js
Normal file
60
controllers/stabilityController.js
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
// Debug
|
||||||
|
const { DebugBuilder } = require("../utilities/debugBuilder");
|
||||||
|
const log = new DebugBuilder("server", "stabilityController");
|
||||||
|
|
||||||
|
// Modules
|
||||||
|
const Generation = require("./stabilityAi/generation_pb");
|
||||||
|
const { GenerationServiceClient } = require("./stabilityAi/generation_pb_service");
|
||||||
|
|
||||||
|
const { grpc } = require("@improbable-eng/grpc-web");
|
||||||
|
const GRPCWeb = grpc;
|
||||||
|
|
||||||
|
const { NodeHttpTransport } = require("@improbable-eng/grpc-web-node-http-transport");
|
||||||
|
const fs = require("fs");
|
||||||
|
const {
|
||||||
|
buildGenerationRequest,
|
||||||
|
executeGenerationRequest,
|
||||||
|
onGenerationComplete,
|
||||||
|
} = require("../utilities/stabilityHelpers");
|
||||||
|
|
||||||
|
// Set Global Color for this controller
|
||||||
|
exports.STABILITY_COLOR = 0xeb34b7;
|
||||||
|
|
||||||
|
// This is a NodeJS-specific requirement - browsers implementations should omit this line.
|
||||||
|
GRPCWeb.setDefaultTransport(NodeHttpTransport());
|
||||||
|
|
||||||
|
// Authenticate using your API key, don't commit your key to a public repository!
|
||||||
|
const metadata = new GRPCWeb.Metadata();
|
||||||
|
metadata.set("Authorization", "Bearer " + process.env.STABILITY_API_KEY);
|
||||||
|
|
||||||
|
// Create a generation client to use with all future requests
|
||||||
|
const stabilityClient = new GenerationServiceClient("https://grpc.stability.ai", {});
|
||||||
|
|
||||||
|
exports.submitImageGenerationTransaction = async () => {
|
||||||
|
const request = buildGenerationRequest("stable-diffusion-512-v2-1", {
|
||||||
|
type: "text-to-image",
|
||||||
|
prompts: [
|
||||||
|
{
|
||||||
|
text: "A dream of a distant galaxy, by Caspar David Friedrich, matte painting trending on artstation HQ",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
width: 512,
|
||||||
|
height: 512,
|
||||||
|
samples: 1,
|
||||||
|
cfgScale: 13,
|
||||||
|
steps: 10,
|
||||||
|
sampler: Generation.DiffusionSampler.SAMPLER_K_DPMPP_2M,
|
||||||
|
});
|
||||||
|
log.DEBUG("Stability request: ", request, metadata, stabilityClient);
|
||||||
|
|
||||||
|
executeGenerationRequest(stabilityClient, request, metadata)
|
||||||
|
.then((response) => {
|
||||||
|
log.DEBUG("Stability Generation response: ". response)
|
||||||
|
onGenerationComplete(response);
|
||||||
|
return;
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
log.ERROR("Failed to make text-to-image request:", error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
62
libCore.js
62
libCore.js
@@ -25,52 +25,6 @@ let parser = new Parser({
|
|||||||
var feedStorage = new FeedStorage();
|
var feedStorage = new FeedStorage();
|
||||||
var postStorage = new PostStorage();
|
var postStorage = new PostStorage();
|
||||||
|
|
||||||
// Initiate a running array of objects to keep track of sources that have no feeds/posts
|
|
||||||
/*
|
|
||||||
var runningPostsToRemove = [{
|
|
||||||
"{SOURCE URL}": {NUMBER OF TIMES IT'S BEEN REMOVED}
|
|
||||||
}]
|
|
||||||
*/
|
|
||||||
var runningPostsToRemove = {};
|
|
||||||
const sourceFailureLimit = process.env.SOURCE_FAILURE_LIMIT ?? 3;
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @param {*} sourceURL
|
|
||||||
*/
|
|
||||||
exports.removeSource = function removeSource(sourceURL) {
|
|
||||||
log.INFO("Removing source URL: ", sourceURL);
|
|
||||||
if (!sourceURL in runningPostsToRemove) {runningPostsToRemove[sourceURL] = 1; return;}
|
|
||||||
|
|
||||||
if (runningPostsToRemove[sourceURL] < sourceFailureLimit) {runningPostsToRemove[sourceURL] += 1; return;}
|
|
||||||
|
|
||||||
feedStorage.getRecordBy('link', sourceURL, (err, record) => {
|
|
||||||
if (err) log.ERROR("Error getting record from feedStorage", err);
|
|
||||||
|
|
||||||
if (!record) log.ERROR("No source returned from feedStorage");
|
|
||||||
feedStorage.destroy(record.id, (err, results) => {
|
|
||||||
if (err) log.ERROR("Error removing ID from results", err);
|
|
||||||
|
|
||||||
if (!results) log.WARN("No results from remove entry");
|
|
||||||
|
|
||||||
log.DEBUG("Source exceeded the limit of retries and has been removed", sourceURL);
|
|
||||||
return;
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Unset a source URL from deletion if the source has not already been deleted
|
|
||||||
* @param {*} sourceURL The source URL to be unset from deletion
|
|
||||||
* @returns {*}
|
|
||||||
*/
|
|
||||||
exports.unsetRemoveSource = function unsetRemoveSource(sourceURL) {
|
|
||||||
log.INFO("Unsetting source URL from deletion (if not already deleted): ", sourceURL);
|
|
||||||
if (!sourceURL in runningPostsToRemove) return;
|
|
||||||
|
|
||||||
if (runningPostsToRemove[sourceURL] > sourceFailureLimit) return delete runningPostsToRemove[sourceURL];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Adds or updates new source url to configured storage
|
* Adds or updates new source url to configured storage
|
||||||
* @constructor
|
* @constructor
|
||||||
@@ -160,22 +114,20 @@ exports.updateFeeds = (client) => {
|
|||||||
// Parse the RSS feed
|
// Parse the RSS feed
|
||||||
parser.parseURL(source.link, async (err, parsedFeed) => {
|
parser.parseURL(source.link, async (err, parsedFeed) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.ERROR("Parser Error: ", runningPostsToRemove, source, err);
|
log.ERROR("Parser Error: ", source, err);
|
||||||
// Call the wrapper to make sure the site isn't just down at the time it checks and is back up the next time
|
|
||||||
this.removeSource(source.link);
|
|
||||||
reject;
|
reject;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
|
log.DEBUG("Parsed Feed Keys", Object.keys(parsedFeed), parsedFeed?.title);
|
||||||
if (parsedFeed?.items){
|
if (parsedFeed?.items){
|
||||||
this.unsetRemoveSource(source.link);
|
for (const post of parsedFeed.items){
|
||||||
for (const post of parsedFeed.items.reverse()){
|
|
||||||
recordPromiseArray.push(new Promise((recordResolve, recordReject) => {
|
recordPromiseArray.push(new Promise((recordResolve, recordReject) => {
|
||||||
log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
|
log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
|
||||||
log.VERBOSE("Post from feed: ", post);
|
log.VERBOSE("Post from feed: ", post);
|
||||||
if (!post.title || !post.link) return recordReject("Missing information from the post");
|
if (!post.title || !post.link || !post.pubDate) return recordReject("Missing information from the post");
|
||||||
if (!post.content || !post['content:encoded']) log.WARN("There is no content for post: ", post.title);
|
if (!post.content || !post['content:encoded']) log.WARN("There is no content for post: ", post.title);
|
||||||
|
|
||||||
post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate ?? Date.now());
|
post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate);
|
||||||
tempPostStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
|
tempPostStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
|
||||||
if (err) throw err;
|
if (err) throw err;
|
||||||
|
|
||||||
@@ -206,13 +158,9 @@ exports.updateFeeds = (client) => {
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
|
||||||
this.removeSource(source.link);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
log.ERROR("Error Parsing Feed: ", source.link, err);
|
log.ERROR("Error Parsing Feed: ", source.link, err);
|
||||||
this.removeSource(source.link);
|
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
Promise.all(recordPromiseArray).then((values) => {
|
Promise.all(recordPromiseArray).then((values) => {
|
||||||
|
|||||||
@@ -25,41 +25,17 @@ var Connection = mysql.createPool({
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Helper Functions
|
// Helper Functions
|
||||||
/**
|
// Function to run and handle SQL errors
|
||||||
* Function to run and handle SQL errors
|
|
||||||
* @param {string} sqlQuery The SQL query string
|
|
||||||
* @param {*} connection The SQL connection to be used to query
|
|
||||||
* @param {function} callback The callback function to be called with an error or the results
|
|
||||||
* @param {number} _retry Set by error retry, increments the number a query has been retried to increase wait time and track a specific query
|
|
||||||
*/
|
|
||||||
function runSQL(sqlQuery, connection, callback = (err, rows) => {
|
function runSQL(sqlQuery, connection, callback = (err, rows) => {
|
||||||
log.ERROR(err);
|
log.ERROR(err);
|
||||||
throw err;
|
throw err;
|
||||||
}, _retry = 0) {
|
}) {
|
||||||
// Start the MySQL Connection
|
// Start the MySQL Connection
|
||||||
if (!connection) connection = Connection;
|
if (!connection) connection = Connection;
|
||||||
connection.query(sqlQuery, (err, rows) => {
|
connection.query(sqlQuery, (err, rows) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
if (err.code === "EHOSTUNREACH") {
|
log.ERROR("SQL Error on query:", sqlQuery, err);
|
||||||
// DB Connection is unavailable
|
return callback(err, undefined);
|
||||||
let retryTimeout;
|
|
||||||
switch(_retry){
|
|
||||||
case 0:
|
|
||||||
retryTimeout = 30000;
|
|
||||||
break;
|
|
||||||
case retry < 15:
|
|
||||||
retryTimeout = 30000 + retry * 15000;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
log.ERROR("Retried Database 15 times over, please check connection status and restart the app", sqlQuery, err);
|
|
||||||
return callback(err, undefined);
|
|
||||||
}
|
|
||||||
log.WARN(`Database connection is unavailable, waiting ${ retryTimeout / 1000 } seconds...`);
|
|
||||||
_retry += 1
|
|
||||||
// Wait for the retry timeout before trying the query again
|
|
||||||
setTimeout(runSQL(sqlQuery, connection, callback, _retry));
|
|
||||||
}
|
|
||||||
else return callback(err, undefined);
|
|
||||||
}
|
}
|
||||||
log.VERBOSE(`SQL result for query '${sqlQuery}':`, rows);
|
log.VERBOSE(`SQL result for query '${sqlQuery}':`, rows);
|
||||||
return callback(undefined, rows);
|
return callback(undefined, rows);
|
||||||
|
|||||||
73
libUtils.js
73
libUtils.js
@@ -76,25 +76,19 @@ exports.onError = (error) => {
|
|||||||
|
|
||||||
exports.sendPost = (post, source, channel, callback) => {
|
exports.sendPost = (post, source, channel, callback) => {
|
||||||
log.DEBUG("Sending post from source: ", post, source);
|
log.DEBUG("Sending post from source: ", post, source);
|
||||||
const postTitle = String(post.title).substring(0, 150);
|
// Reset the content parameter with the encoded parameter
|
||||||
|
post.content = parse(post['content:encoded'] ?? post.content);
|
||||||
|
const postTitle = post.title;
|
||||||
const postLink = post.link;
|
const postLink = post.link;
|
||||||
let postContent;
|
// Get the post content and trim it to length or add a placeholder if necessary
|
||||||
|
var postText = String(post.content.text);
|
||||||
if (post.content) {
|
if (postText.length >= 300) postText = `${postText.slice(0, 300).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))}...`;
|
||||||
// Reset the content parameter with the encoded parameter
|
else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
|
||||||
post.content = parse(post['content:encoded'] ?? post.content);
|
var postContent = postText;
|
||||||
// Get the post content and trim it to length or add a placeholder if necessary
|
|
||||||
var postText = String(post.content.text);
|
|
||||||
if (postText.length >= 3800) postText = `${postText.slice(0, 3800).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))} [...](${post.link})`;
|
|
||||||
else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
|
|
||||||
postContent = postText;
|
|
||||||
}
|
|
||||||
else postContent = `*This post has no content* [Direct Link](${post.link})`;
|
|
||||||
|
|
||||||
// Check for embedded youtube videos and add the first four as links
|
// Check for embedded youtube videos and add the first four as links
|
||||||
const ytVideos = String(post.content).match(youtubeVideoRegex);
|
const ytVideos = String(post.content).match(youtubeVideoRegex);
|
||||||
if (ytVideos) {
|
if (ytVideos) {
|
||||||
for (var ytVideo of ytVideos.slice(0,4)){
|
for (const ytVideo of ytVideos.slice(0,4)){
|
||||||
// If the video is an embed, replace the embed to make it watchable
|
// If the video is an embed, replace the embed to make it watchable
|
||||||
if (ytVideo.includes("embed")) ytVideo = ytVideo.replace("embed/", "watch?v=");
|
if (ytVideo.includes("embed")) ytVideo = ytVideo.replace("embed/", "watch?v=");
|
||||||
postContent += `\nEmbeded Video from Post: [YouTube](${ytVideo})`
|
postContent += `\nEmbeded Video from Post: [YouTube](${ytVideo})`
|
||||||
@@ -103,25 +97,21 @@ exports.sendPost = (post, source, channel, callback) => {
|
|||||||
log.DEBUG("Post content: ", postContent);
|
log.DEBUG("Post content: ", postContent);
|
||||||
|
|
||||||
const postId = post.postId;
|
const postId = post.postId;
|
||||||
if (!post.pubDate) post.pubDate = Date.now();
|
const postPubDate = new Date(post.pubDate).toISOString() ?? new Date().toISOString();
|
||||||
const postPubDate = new Date(post.pubDate).toISOString();
|
|
||||||
|
|
||||||
var postSourceLink = source.title;
|
var postSourceLink = source.title;
|
||||||
var postImage = post.image ?? undefined;
|
var postImage = post.image ?? undefined;
|
||||||
|
|
||||||
if (!postImage){
|
if (!postImage){
|
||||||
if (post.content){
|
const linksInPost = post.content.querySelectorAll("a");
|
||||||
const linksInPost = post.content.querySelectorAll("a");
|
if (linksInPost) {
|
||||||
if (linksInPost) {
|
log.DEBUG("Found links in post:", linksInPost);
|
||||||
log.DEBUG("Found links in post:", linksInPost);
|
for (const link of linksInPost) {
|
||||||
for (const link of linksInPost) {
|
// Check to see if this link is a youtube video that was already found, if so skip it
|
||||||
// Check to see if this link is a youtube video that was already found, if so skip it
|
if (ytVideos?.includes(link)) continue;
|
||||||
if (ytVideos?.includes(link)) continue;
|
const images = String(link.getAttribute("href")).match(imageRegex);
|
||||||
const images = String(link.getAttribute("href")).match(imageRegex);
|
log.DEBUG("Images found in post:", images);
|
||||||
log.DEBUG("Images found in post:", images);
|
if (images) {
|
||||||
if (images) {
|
postImage = images[0];
|
||||||
postImage = images[0];
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -133,8 +123,9 @@ exports.sendPost = (post, source, channel, callback) => {
|
|||||||
.setColor(0x0099FF)
|
.setColor(0x0099FF)
|
||||||
.setTitle(postTitle)
|
.setTitle(postTitle)
|
||||||
.setURL(postLink)
|
.setURL(postLink)
|
||||||
.addFields({ name: 'Source', value: postSourceLink, inline: true })
|
.addFields({ name: "Post Content", value: postContent, inline: false })
|
||||||
.addFields({ name: 'Published', value: postPubDate, inline: true });
|
.addFields({ name: 'Published', value: postPubDate, inline: true })
|
||||||
|
.addFields({ name: 'Source', value: postSourceLink, inline: true });
|
||||||
|
|
||||||
// TODO - If there is more than one image, create a canvas and post the created canvas
|
// TODO - If there is more than one image, create a canvas and post the created canvas
|
||||||
if (postImage) {
|
if (postImage) {
|
||||||
@@ -142,10 +133,6 @@ exports.sendPost = (post, source, channel, callback) => {
|
|||||||
rssMessage.setImage(postImage);
|
rssMessage.setImage(postImage);
|
||||||
}
|
}
|
||||||
|
|
||||||
//Add the main content if it's present
|
|
||||||
postContent = postContent.slice(0, 4090).trim();
|
|
||||||
if (postContent) rssMessage.setDescription( postContent );
|
|
||||||
|
|
||||||
channel.send({ embeds: [rssMessage] });
|
channel.send({ embeds: [rssMessage] });
|
||||||
|
|
||||||
//throw new Error("YOU SHALL NOT PASS");
|
//throw new Error("YOU SHALL NOT PASS");
|
||||||
@@ -153,7 +140,7 @@ exports.sendPost = (post, source, channel, callback) => {
|
|||||||
return callback(undefined, true);
|
return callback(undefined, true);
|
||||||
}
|
}
|
||||||
catch (err){
|
catch (err){
|
||||||
log.ERROR("Error sending message: ", postTitle, postId, postContent, postPubDate, err);
|
log.ERROR("Error sending message: ", err);
|
||||||
return callback(err, undefined);
|
return callback(err, undefined);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -161,15 +148,3 @@ exports.sendPost = (post, source, channel, callback) => {
|
|||||||
exports.returnHash = (...stringsIncluded) => {
|
exports.returnHash = (...stringsIncluded) => {
|
||||||
return crypto.createHash('sha1').update(`${stringsIncluded.join("-<<??//\\\\??>>-")}`).digest("base64");
|
return crypto.createHash('sha1').update(`${stringsIncluded.join("-<<??//\\\\??>>-")}`).digest("base64");
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if a key exists in an array of objects
|
|
||||||
* @param {*} key The key to search for
|
|
||||||
* @param {*} array The object to search for the key
|
|
||||||
* @returns {boolean} If the key exists in the object
|
|
||||||
*/
|
|
||||||
exports.checkForKeyInArrayOfObjects = (key, array) => {
|
|
||||||
return array.filter(function (o) {
|
|
||||||
return o.hasOwnProperty(key);
|
|
||||||
}).length > 0;
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -27,7 +27,12 @@
|
|||||||
"openai": "~3.1.0",
|
"openai": "~3.1.0",
|
||||||
"parse-files": "~0.1.1",
|
"parse-files": "~0.1.1",
|
||||||
"rss-parser": "~3.12.0",
|
"rss-parser": "~3.12.0",
|
||||||
"user-agents": "~1.0.1303"
|
"user-agents": "~1.0.1303",
|
||||||
|
"@improbable-eng/grpc-web": "~0.15.0",
|
||||||
|
"@improbable-eng/grpc-web-node-http-transport": "~0.15.0",
|
||||||
|
"google-protobuf": "~3.21.2",
|
||||||
|
"grpc": "~1.24.11",
|
||||||
|
"typescript": "~4.9.5"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: no test specified\" && exit 1",
|
"test": "echo \"Error: no test specified\" && exit 1",
|
||||||
|
|||||||
15
update.sh
15
update.sh
@@ -1,15 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# Stating Message
|
|
||||||
echo "<!-- UPDATING ---!>"
|
|
||||||
|
|
||||||
# TODO - Add an updater for Stable Diffusion API
|
|
||||||
|
|
||||||
# Update the git Repo
|
|
||||||
git fetch -a -p
|
|
||||||
git pull
|
|
||||||
|
|
||||||
# Install any new libraries
|
|
||||||
npm i
|
|
||||||
|
|
||||||
# Update complete message
|
|
||||||
echo "<!--- UPDATE COMPLETE! ---!>"
|
|
||||||
@@ -18,7 +18,7 @@ exports.DebugBuilder = class DebugBuilder {
|
|||||||
this.ERROR = (...messageParts) => {
|
this.ERROR = (...messageParts) => {
|
||||||
const error = debug(`${appName}:${fileName}:ERROR`);
|
const error = debug(`${appName}:${fileName}:ERROR`);
|
||||||
error(messageParts);
|
error(messageParts);
|
||||||
if (process.env.EXIT_ON_ERROR && process.env.EXIT_ON_ERROR > 0) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
|
if (process.env.EXIT_ON_ERROR) setTimeout(process.exit, process.env.EXIT_ON_ERROR_DELAY ?? 0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ const mysql = require('mysql');
|
|||||||
const databaseConfig = require('../config/databaseConfig');
|
const databaseConfig = require('../config/databaseConfig');
|
||||||
const utils = require('./utils');
|
const utils = require('./utils');
|
||||||
|
|
||||||
const connection = mysql.createPool({
|
const connection = mysql.createConnection({
|
||||||
host: databaseConfig.database_host,
|
host: databaseConfig.database_host,
|
||||||
user: databaseConfig.database_user,
|
user: databaseConfig.database_user,
|
||||||
password: databaseConfig.database_password,
|
password: databaseConfig.database_password,
|
||||||
@@ -11,6 +11,8 @@ const connection = mysql.createPool({
|
|||||||
|
|
||||||
const nodesTable = `${databaseConfig.database_database}.nodes`;
|
const nodesTable = `${databaseConfig.database_database}.nodes`;
|
||||||
|
|
||||||
|
connection.connect()
|
||||||
|
|
||||||
/** Get all nodes the server knows about regardless of status
|
/** Get all nodes the server knows about regardless of status
|
||||||
* @param {*} callback Callback function
|
* @param {*} callback Callback function
|
||||||
*/
|
*/
|
||||||
|
|||||||
263
utilities/stabilityHelpers.js
Normal file
263
utilities/stabilityHelpers.js
Normal file
@@ -0,0 +1,263 @@
|
|||||||
|
// Debug
|
||||||
|
const { DebugBuilder } = require("../utilities/debugBuilder");
|
||||||
|
const log = new DebugBuilder("server", "stabilityController");
|
||||||
|
|
||||||
|
// Modules
|
||||||
|
const Generation = require("../controllers/stabilityAi/generation_pb");
|
||||||
|
const { GenerationServiceClient } = require("../controllers/stabilityAi/generation_pb_service");
|
||||||
|
const fs = require("fs");
|
||||||
|
|
||||||
|
const { grpc } = require("@improbable-eng/grpc-web");
|
||||||
|
const GRPCWeb = grpc;
|
||||||
|
|
||||||
|
const path = require("node:path");
|
||||||
|
|
||||||
|
/**
 * Reports whether a generation artifact is a successfully produced image.
 *
 * An artifact qualifies only when it is of the IMAGE type, finished with the
 * NULL (i.e. no-error, unfiltered) reason, and actually carries binary data.
 *
 * @param {Generation.Artifact} artifact - Artifact from a generation answer.
 * @returns {boolean} True when the artifact is a usable generated image.
 */
function isImageArtifact(artifact) {
  log.DEBUG("Checking if image is an artifact", artifact);
  // Short-circuits left to right, mirroring the original guard order.
  return artifact.getType() === Generation.ArtifactType.ARTIFACT_IMAGE
    && artifact.getFinishReason() === Generation.FinishReason.NULL
    && artifact.hasBinary();
}
exports.isImageArtifact = isImageArtifact;
|
||||||
|
|
||||||
|
/**
 * Reports whether a generation artifact was suppressed by the NSFW filter.
 *
 * @param {Generation.Artifact} artifact - Artifact from a generation answer.
 * @returns {boolean} True when the artifact is an image that finished with
 *   the FILTER reason (i.e. it was removed by the safety classifier).
 */
function isNSFWFilteredArtifact(artifact) {
  // Short-circuits: finish reason is only inspected for image artifacts.
  return artifact.getType() === Generation.ArtifactType.ARTIFACT_IMAGE
    && artifact.getFinishReason() === Generation.FinishReason.FILTER;
}
exports.isNSFWFilteredArtifact = isNSFWFilteredArtifact;
|
||||||
|
|
||||||
|
/**
 * Builds a generation request for a specified engine with the specified parameters.
 *
 * `params.type` selects the flow: "upscaling" (short-circuits early with a
 * minimal request), "text-to-image", "image-to-image", or
 * "image-to-image-masking". All other fields are optional and only applied
 * when truthy.
 *
 * @param {string} engineID - Engine identifier (see the list of valid engines below).
 * @param {object} params - Generation options: type, width, height, samples,
 *   steps, seed, sampler, cfgScale, stepScheduleStart, stepScheduleEnd,
 *   clipGuidancePreset, prompts, initImage, maskImage.
 * @returns {Generation.Request} A fully configured protobuf request, ready to
 *   hand to the generation client.
 */
function buildGenerationRequest(engineID, params) {
  if (params.type === "upscaling") {
    const request = new Generation.Request()
    request.setEngineId(engineID)
    request.setRequestedType(Generation.ArtifactType.ARTIFACT_IMAGE)
    request.setClassifier(new Generation.ClassifierParameters())

    const imageParams = new Generation.ImageParameters()
    // NOTE(review): only ONE target dimension is applied (width wins over
    // height) — presumably the upscaler derives the other side from the
    // input's aspect ratio; confirm against the engine's behavior.
    if ("width" in params && !!params.width) {
      imageParams.setWidth(params.width)
    } else if ("height" in params && !!params.height) {
      imageParams.setHeight(params.height)
    }
    request.setImage(imageParams)
    request.addPrompt(createInitImagePrompt(params.initImage))

    return request
  }

  const imageParams = new Generation.ImageParameters()
  if (params.type === "text-to-image") {
    // Output dimensions only apply to pure text-to-image requests.
    params.width && imageParams.setWidth(params.width)
    params.height && imageParams.setHeight(params.height)
  }

  // Set the number of images to generate (Default 1)
  params.samples && imageParams.setSamples(params.samples)

  // Set the steps (Default 30)
  // Represents the amount of inference steps performed on image generation.
  params.steps && imageParams.setSteps(params.steps)

  // Set the seed (Default 0)
  // Including a seed will cause the results to be deterministic.
  // Omitting the seed or setting it to `0` will do the opposite.
  params.seed && imageParams.addSeed(params.seed)

  // Set the sampler (Default 'automatic')
  // Omitting this value enables 'automatic' mode where we choose the best sampler for you based
  // on the current payload. For example, since CLIP guidance only works on ancestral samplers,
  // when CLIP guidance is enabled, we will automatically choose an ancestral sampler for you.
  if (params.sampler) {
    const transformType = new Generation.TransformType()
    transformType.setDiffusion(params.sampler)
    imageParams.setTransform(transformType)
  }

  // Set the Engine
  // At the time of writing, valid engines are:
  // stable-diffusion-v1,
  // stable-diffusion-v1-5
  // stable-diffusion-512-v2-0
  // stable-diffusion-768-v2-0
  // stable-diffusion-512-v2-1
  // stable-diffusion-768-v2-1
  // stable-inpainting-v1-0
  // stable-inpainting-512-v2-0
  // esrgan-v1-x2plus
  const request = new Generation.Request()
  request.setEngineId(engineID)
  request.setRequestedType(Generation.ArtifactType.ARTIFACT_IMAGE)
  request.setClassifier(new Generation.ClassifierParameters())

  // Set the CFG scale (Default 7)
  // Influences how strongly your generation is guided to match your prompt. Higher values match closer.
  const samplerParams = new Generation.SamplerParameters()
  params.cfgScale && samplerParams.setCfgScale(params.cfgScale)

  const stepParams = new Generation.StepParameter()
  stepParams.setScaledStep(0)
  stepParams.setSampler(samplerParams)

  const scheduleParams = new Generation.ScheduleParameters()
  if (params.type === "image-to-image") {
    // If we're doing image-to-image generation then we need to configure
    // how much influence the initial image has on the diffusion process
    // NOTE(review): setStart is called even when params.stepScheduleStart is
    // undefined — confirm the protobuf setter tolerates that.
    scheduleParams.setStart(params.stepScheduleStart)
    if (params.stepScheduleEnd) {
      scheduleParams.setEnd(params.stepScheduleEnd)
    }
  } else if (params.type === "image-to-image-masking") {
    // Step schedule start is always 1 for masking requests
    scheduleParams.setStart(1)
  }

  stepParams.setSchedule(scheduleParams)

  // Set CLIP Guidance (Default: None)
  // NOTE: This only works with ancestral samplers. Omitting the sampler parameter above will ensure
  // that we automatically choose an ancestral sampler for you when CLIP guidance is enabled.
  if (params.clipGuidancePreset) {
    const guidanceParameters = new Generation.GuidanceParameters()
    guidanceParameters.setGuidancePreset(params.clipGuidancePreset)
    stepParams.setGuidance(guidanceParameters)
  }

  imageParams.addParameters(stepParams)
  request.setImage(imageParams)

  // Attach every text prompt, with an optional per-prompt weight.
  params.prompts.forEach(textPrompt => {
    const prompt = new Generation.Prompt()
    prompt.setText(textPrompt.text)

    // If provided, set the prompt's weight (use negative values for negative weighting)
    if (textPrompt.weight) {
      const promptParameters = new Generation.PromptParameters()
      promptParameters.setWeight(textPrompt.weight)
      prompt.setParameters(promptParameters)
    }

    request.addPrompt(prompt)
  })

  // Add image prompts if we're doing some kind of image-to-image generation or upscaling
  if (params.type === "image-to-image") {
    request.addPrompt(createInitImagePrompt(params.initImage))
  } else if (params.type === "image-to-image-masking") {
    request.addPrompt(createInitImagePrompt(params.initImage))
    request.addPrompt(createMaskImagePrompt(params.maskImage))
  }

  return request
}
exports.buildGenerationRequest = buildGenerationRequest;
|
||||||
|
|
||||||
|
/**
 * Wraps raw image bytes in a Prompt marked as the initial image for
 * image-to-image / upscaling flows.
 *
 * @param {Uint8Array|Buffer} imageBinary - Encoded image bytes.
 * @returns {Generation.Prompt} Prompt carrying the init-image artifact.
 */
function createInitImagePrompt(imageBinary) {
  // Flag this prompt as the diffusion starting point.
  const parameters = new Generation.PromptParameters()
  parameters.setInit(true)

  const artifact = new Generation.Artifact()
  artifact.setBinary(imageBinary)
  artifact.setType(Generation.ArtifactType.ARTIFACT_IMAGE)

  const prompt = new Generation.Prompt()
  prompt.setParameters(parameters)
  prompt.setArtifact(artifact)
  return prompt
}
|
||||||
|
|
||||||
|
/**
 * Wraps raw mask bytes in a Prompt for image-to-image-masking requests.
 *
 * @param {Uint8Array|Buffer} imageBinary - Encoded mask image bytes.
 * @returns {Generation.Prompt} Prompt carrying the mask artifact.
 */
function createMaskImagePrompt(imageBinary) {
  const artifact = new Generation.Artifact()
  artifact.setBinary(imageBinary)
  artifact.setType(Generation.ArtifactType.ARTIFACT_MASK)

  const prompt = new Generation.Prompt()
  prompt.setArtifact(artifact)
  return prompt
}
|
||||||
|
|
||||||
|
/**
 * Executes a GenerationRequest, abstracting the gRPC streaming result behind a Promise.
 *
 * @param {GenerationServiceClient} generationClient - Client to stream from.
 * @param {Generation.Request} request - Request built by buildGenerationRequest.
 * @param {grpc.Metadata} metadata - Call metadata (e.g. authorization).
 * @returns {Promise<{filteredArtifacts: Array, imageArtifacts: Array}|Error>}
 *   Sorted artifacts on success; on failure the Error is RETURNED, not thrown
 *   — callers are expected to check `result instanceof Error`.
 */
async function executeGenerationRequest(generationClient, request, metadata) {
  try {
    const stream = generationClient.generate(request, metadata)
    const collected = []
    await new Promise((resolve, reject) => {
      stream.on("data", answer => {
        collected.push(answer)
      })
      stream.on("end", () => {
        resolve(collected)
      })
      stream.on("status", status => {
        // Any non-zero gRPC status code is a failure.
        if (status.code !== 0) reject(status.details)
      })
    })
    return extractArtifacts(collected)
  } catch (err) {
    // Normalize non-Error rejections (gRPC status details are plain strings).
    return err instanceof Error ? err : new Error(JSON.stringify(err))
  }
}
exports.executeGenerationRequest = executeGenerationRequest;
|
||||||
|
|
||||||
|
/**
 * Partitions all artifacts from the streamed answers into successful images
 * and NSFW-filtered artifacts; artifacts matching neither are dropped.
 *
 * @param {Array} answers - Answers collected from the generation stream.
 * @returns {{filteredArtifacts: Array, imageArtifacts: Array}} Sorted artifacts.
 */
function extractArtifacts(answers) {
  const sorted = { filteredArtifacts: [], imageArtifacts: [] }

  answers
    .flatMap(answer => answer.getArtifactsList())
    .forEach(artifact => {
      if (isImageArtifact(artifact)) {
        sorted.imageArtifacts.push(artifact)
      } else if (isNSFWFilteredArtifact(artifact)) {
        sorted.filteredArtifacts.push(artifact)
      }
    })

  return sorted
}
|
||||||
|
|
||||||
|
/**
 * Generation completion handler - replace this with your own logic.
 *
 * Logs a summary of the generation results, then writes each successful image
 * artifact to `.generations/image-<seed>.png` relative to this module.
 *
 * @param {{imageArtifacts: Array, filteredArtifacts: Array}|Error} response
 *   Result of `executeGenerationRequest`; an Error instance when generation failed.
 * @throws {Error} Rethrows `response` when it is an Error.
 */
function onGenerationComplete(response) {
  if (response instanceof Error) {
    log.ERROR("Generation failed", response)
    throw response
  }

  log.DEBUG(
    // `!== 1` so a count of 0 also reads "0 images" (the old `> 1` produced "0 image").
    `${response.imageArtifacts.length} image${
      response.imageArtifacts.length !== 1 ? "s" : ""
    } were successfully generated.`
  )

  // Do something with NSFW filtered artifacts
  if (response.filteredArtifacts.length > 0) {
    log.DEBUG(
      `${response.filteredArtifacts.length} artifact` +
      `${response.filteredArtifacts.length !== 1 ? "s" : ""}` +
      ` were filtered by the NSFW classifier and need to be retried.`
    )
  }

  // The output directory is gitignored (`.generations/`), so it will not
  // exist on a fresh checkout — create it before writing.
  const outputDir = path.resolve(__dirname, "../.generations")

  // Do something with the successful image artifacts
  response.imageArtifacts.forEach(artifact => {
    try {
      fs.mkdirSync(outputDir, { recursive: true })
      // (The old code assigned writeFileSync's return to an unused
      // `writePath` const — writeFileSync returns undefined.)
      fs.writeFileSync(
        path.join(outputDir, `image-${artifact.getSeed()}.png`),
        Buffer.from(artifact.getBinary_asU8())
      )
    } catch (error) {
      // Best-effort: a failed write is logged but does not abort the rest.
      log.ERROR("Failed to write resulting image to disk", error)
    }
  })

  // For browser implementations: you could use the `artifact.getBinary_asB64()` method to get a
  // base64 encoded string and then create a data URL from that and display it in an <img> tag.
}
exports.onGenerationComplete = onGenerationComplete;
|
||||||
Reference in New Issue
Block a user