Compare commits
13 Commits
8ed0b969dd...feature/#6
| Author | SHA1 | Date |
|---|---|---|
| | e874e3c785 | |
| | 53256cdc42 | |
| | 3c05395ddf | |
| | f840d754ac | |
| | cb8dfca8dd | |
| | d372bcd8af | |
| | 20856734de | |
| | cdb766520d | |
| | fcf61f3958 | |
| | af74c7b90d | |
| | ffacd19883 | |
| | 9e2814cb2c | |
| | 72134b1b7b | |
1 .gitignore vendored
@@ -4,3 +4,4 @@ package-lock.json
*.bak
*.log
*._.*
.generations/
commands/imagine.js
@@ -1,10 +1,10 @@
const { submitImagePromptTransaction } = require("../controllers/openAiController");
const { SlashCommandBuilder } = require('discord.js');
const { DebugBuilder } = require("../utilities/debugBuilder");
const { submitImagePromptTransaction, DALLE_COLOR } = require("../controllers/openAiController");
const log = new DebugBuilder("server", "imagine");
const { EmmeliaEmbedBuilder } = require('../libUtils');

const COST_OF_COMMAND = 800
const COST_OF_COMMAND = 800;

module.exports = {
data: new SlashCommandBuilder()
@@ -45,21 +45,31 @@ module.exports = {
submitImagePromptTransaction(promptText, discordAccountId, images, size, interaction, this, async (err, imageResults) => {
if (err) throw err;

var dalleEmbeds = [];
log.DEBUG("Image Results: ", imageResults)

const dalleEmbed = new EmmeliaEmbedBuilder()
.setColor(0x0099FF)
// Add the information post
dalleEmbeds.push(new EmmeliaEmbedBuilder()
.setColor(DALLE_COLOR)
.setTitle(`New Image Result`)
.setDescription(`${interaction.member.user} sent the prompt: '${promptText}'`)
.addFields({ name: 'Tokens Used', value: `${imageResults.totalTokens}`, inline: true })

const imagesInResult = Array(imageResults.results.data).length

);
// Add the images to the result
const imagesInResult = Array(imageResults.results).length
log.DEBUG("Images in the result: ", imagesInResult);

if (imagesInResult == 1) dalleEmbed.setImage(imageResults.results.data[0].url);

await interaction.editReply({ embeds: [dalleEmbed], ephemeral: false });
if (imagesInResult >= 1) {
for (const imageData of imageResults.results.data){
const imageUrl = imageData.url;
dalleEmbeds.push(new EmmeliaEmbedBuilder().setURL(imageUrl).setImage(imageUrl).setColor(DALLE_COLOR));
}
}
// Add the information post
dalleEmbeds.push(new EmmeliaEmbedBuilder()
.setColor(DALLE_COLOR)
.addFields({ name: 'Tokens Used', value: `${imageResults.totalTokens}`, inline: true })
.addFields({ name: 'Images Generated', value: `${imagesInResult}`, inline: true })
.addFields({ name: 'Image Size Requested', value: `${size}`, inline: true })
);
await interaction.editReply({ embeds: dalleEmbeds, ephemeral: false });
});

// Needs reply code to reply to the generation
33 commands/stability.js Normal file
@@ -0,0 +1,33 @@
const { SlashCommandBuilder } = require('discord.js');
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "stability");
const { submitImageGenerationTransaction, STABILITY_COLOR } = require("../controllers/stabilityController");
const { EmmeliaEmbedBuilder } = require('../libUtils');

module.exports = {
data: new SlashCommandBuilder()
.setName('stability')
.setDescription('Replies with your input!'),
/*
.addStringOption(option =>
option.setName('input')
.setDescription('The input to echo back')
.setRequired(false)
.addChoices()),
*/
example: "stability",
isPrivileged: true,
requiresTokens: false,
defaultTokenUsage: 0,
deferInitialReply: true,
async execute(interaction) {
try{
await submitImageGenerationTransaction();
await interaction.editReply('**Pong.**'); // TODO - Add insults as the response to this command
}catch(err){
log.ERROR(err)
//await interaction.reply(err.toString());
}
}
};
controllers/openAiController.js
@@ -14,6 +14,10 @@ const configuration = new Configuration({
const openai = new OpenAIApi(configuration);

// Global Vars for Other functions
exports.DALLE_COLOR = 0x34c6eb;
exports.CHATGPT_COLOR = 0x34eb9b;

async function getImageGeneration(_prompt, { _images_to_generate = 1, _image_size = "256x256" }, callback){
const validImageSizes = ["256x256", "512x512", "1024x1024"];

@@ -140,7 +144,7 @@ exports.submitTextPromptTransaction = async (prompt, temperature, max_tokens, di
}

if (!images_to_generate) images_to_generate = 1;
if (!image_size) images_to_generate = "256x256";
if (!image_size) image_size = "256x256";

totalTokensToBeUsed = pricePerImage * images_to_generate;
71 controllers/stabilityAi/generation_grpc_pb.js Normal file
@@ -0,0 +1,71 @@
// GENERATED CODE -- DO NOT EDIT!

'use strict';
var grpc = require('grpc');
var generation_pb = require('./generation_pb.js');
var google_protobuf_struct_pb = require('google-protobuf/google/protobuf/struct_pb.js');
var tensors_pb = require('./tensors_pb.js');

function serialize_gooseai_Answer(arg) {
if (!(arg instanceof generation_pb.Answer)) {
throw new Error('Expected argument of type gooseai.Answer');
}
return Buffer.from(arg.serializeBinary());
}

function deserialize_gooseai_Answer(buffer_arg) {
return generation_pb.Answer.deserializeBinary(new Uint8Array(buffer_arg));
}

function serialize_gooseai_ChainRequest(arg) {
if (!(arg instanceof generation_pb.ChainRequest)) {
throw new Error('Expected argument of type gooseai.ChainRequest');
}
return Buffer.from(arg.serializeBinary());
}

function deserialize_gooseai_ChainRequest(buffer_arg) {
return generation_pb.ChainRequest.deserializeBinary(new Uint8Array(buffer_arg));
}

function serialize_gooseai_Request(arg) {
if (!(arg instanceof generation_pb.Request)) {
throw new Error('Expected argument of type gooseai.Request');
}
return Buffer.from(arg.serializeBinary());
}

function deserialize_gooseai_Request(buffer_arg) {
return generation_pb.Request.deserializeBinary(new Uint8Array(buffer_arg));
}

//
// gRPC services
//
var GenerationServiceService = exports.GenerationServiceService = {
generate: {
path: '/gooseai.GenerationService/Generate',
requestStream: false,
responseStream: true,
requestType: generation_pb.Request,
responseType: generation_pb.Answer,
requestSerialize: serialize_gooseai_Request,
requestDeserialize: deserialize_gooseai_Request,
responseSerialize: serialize_gooseai_Answer,
responseDeserialize: deserialize_gooseai_Answer,
},
chainGenerate: {
path: '/gooseai.GenerationService/ChainGenerate',
requestStream: false,
responseStream: true,
requestType: generation_pb.ChainRequest,
responseType: generation_pb.Answer,
requestSerialize: serialize_gooseai_ChainRequest,
requestDeserialize: deserialize_gooseai_ChainRequest,
responseSerialize: serialize_gooseai_Answer,
responseDeserialize: deserialize_gooseai_Answer,
},
};

exports.GenerationServiceClient = grpc.makeGenericClientConstructor(GenerationServiceService);
11988 controllers/stabilityAi/generation_pb.js Normal file
File diff suppressed because it is too large
117 controllers/stabilityAi/generation_pb_service.js Normal file
@@ -0,0 +1,117 @@
// package: gooseai
// file: generation.proto

var generation_pb = require("./generation_pb");
var grpc = require("@improbable-eng/grpc-web").grpc;

var GenerationService = (function () {
function GenerationService() {}
GenerationService.serviceName = "gooseai.GenerationService";
return GenerationService;
}());

GenerationService.Generate = {
methodName: "Generate",
service: GenerationService,
requestStream: false,
responseStream: true,
requestType: generation_pb.Request,
responseType: generation_pb.Answer
};

GenerationService.ChainGenerate = {
methodName: "ChainGenerate",
service: GenerationService,
requestStream: false,
responseStream: true,
requestType: generation_pb.ChainRequest,
responseType: generation_pb.Answer
};

exports.GenerationService = GenerationService;

function GenerationServiceClient(serviceHost, options) {
this.serviceHost = serviceHost;
this.options = options || {};
}

GenerationServiceClient.prototype.generate = function generate(requestMessage, metadata) {
var listeners = {
data: [],
end: [],
status: []
};
var client = grpc.invoke(GenerationService.Generate, {
request: requestMessage,
host: this.serviceHost,
metadata: metadata,
transport: this.options.transport,
debug: this.options.debug,
onMessage: function (responseMessage) {
listeners.data.forEach(function (handler) {
handler(responseMessage);
});
},
onEnd: function (status, statusMessage, trailers) {
listeners.status.forEach(function (handler) {
handler({ code: status, details: statusMessage, metadata: trailers });
});
listeners.end.forEach(function (handler) {
handler({ code: status, details: statusMessage, metadata: trailers });
});
listeners = null;
}
});
return {
on: function (type, handler) {
listeners[type].push(handler);
return this;
},
cancel: function () {
listeners = null;
client.close();
}
};
};

GenerationServiceClient.prototype.chainGenerate = function chainGenerate(requestMessage, metadata) {
var listeners = {
data: [],
end: [],
status: []
};
var client = grpc.invoke(GenerationService.ChainGenerate, {
request: requestMessage,
host: this.serviceHost,
metadata: metadata,
transport: this.options.transport,
debug: this.options.debug,
onMessage: function (responseMessage) {
listeners.data.forEach(function (handler) {
handler(responseMessage);
});
},
onEnd: function (status, statusMessage, trailers) {
listeners.status.forEach(function (handler) {
handler({ code: status, details: statusMessage, metadata: trailers });
});
listeners.end.forEach(function (handler) {
handler({ code: status, details: statusMessage, metadata: trailers });
});
listeners = null;
}
});
return {
on: function (type, handler) {
listeners[type].push(handler);
return this;
},
cancel: function () {
listeners = null;
client.close();
}
};
};

exports.GenerationServiceClient = GenerationServiceClient;
1108 controllers/stabilityAi/tensors_pb.js Normal file
File diff suppressed because it is too large
60 controllers/stabilityController.js Normal file
@@ -0,0 +1,60 @@
// Debug
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "stabilityController");

// Modules
const Generation = require("./stabilityAi/generation_pb");
const { GenerationServiceClient } = require("./stabilityAi/generation_pb_service");

const { grpc } = require("@improbable-eng/grpc-web");
const GRPCWeb = grpc;

const { NodeHttpTransport } = require("@improbable-eng/grpc-web-node-http-transport");
const fs = require("fs");
const {
buildGenerationRequest,
executeGenerationRequest,
onGenerationComplete,
} = require("../utilities/stabilityHelpers");

// Set Global Color for this controller
exports.STABILITY_COLOR = 0xeb34b7;

// This is a NodeJS-specific requirement - browsers implementations should omit this line.
GRPCWeb.setDefaultTransport(NodeHttpTransport());

// Authenticate using your API key, don't commit your key to a public repository!
const metadata = new GRPCWeb.Metadata();
metadata.set("Authorization", "Bearer " + process.env.STABILITY_API_KEY);

// Create a generation client to use with all future requests
const stabilityClient = new GenerationServiceClient("https://grpc.stability.ai", {});

exports.submitImageGenerationTransaction = async () => {
const request = buildGenerationRequest("stable-diffusion-512-v2-1", {
type: "text-to-image",
prompts: [
{
text: "A dream of a distant galaxy, by Caspar David Friedrich, matte painting trending on artstation HQ",
},
],
width: 512,
height: 512,
samples: 1,
cfgScale: 13,
steps: 10,
sampler: Generation.DiffusionSampler.SAMPLER_K_DPMPP_2M,
});
log.DEBUG("Stability request: ", request, metadata, stabilityClient);

executeGenerationRequest(stabilityClient, request, metadata)
.then((response) => {
log.DEBUG("Stability Generation response: ". response)
|
||||
onGenerationComplete(response);
return;
})
.catch((error) => {
log.ERROR("Failed to make text-to-image request:", error);
}
);
}
73 libCore.js
@@ -5,6 +5,7 @@ const { FeedStorage, PostStorage } = require("./libStorage");
const libUtils = require("./libUtils");
const { DebugBuilder } = require("./utilities/debugBuilder");
const log = new DebugBuilder("server", "libCore");
const mysql = require("mysql");

const UserAgent = require("user-agents");
process.env.USER_AGENT_STRING = new UserAgent({ platform: 'Win32' }).toString();
@@ -83,63 +84,99 @@ exports.deleteSource = function (title, callback) {
/**
* Update channels with new posts from sources
*/
exports.updateFeeds = async (client) => {
exports.updateFeeds = (client) => {
if (!client) throw new Error("Client object not passed");
feedStorage.getAllRecords(async (err, records) => {
// Create a temp pool to use for all connections while updating the feed
var tempConnection = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME,
connectionLimit: 10
});

const tempFeedStorage = new FeedStorage(tempConnection);
const tempPostStorage = new PostStorage(tempConnection);

// Array of promises to wait on before closing the connection
var recordPromiseArray = [];
var sourcePromiseArray = [];

tempFeedStorage.getAllRecords(async (err, records) => {
// Load the posts from each RSS source
for (const source of records) {
sourcePromiseArray.push(new Promise((resolve, reject) => {
log.DEBUG('Record title: ', source.title);
log.DEBUG('Record link: ', source.link);
log.DEBUG('Record category: ', source.category);
log.DEBUG('Record guild ID: ', source.guild_id);
log.DEBUG('Record channel ID: ', source.channel_id);

await parser.parseURL(source.link, async (err, parsedFeed) => {
// Parse the RSS feed
parser.parseURL(source.link, async (err, parsedFeed) => {
if (err) {
log.ERROR("Parser Error: ", source, err);
return;
reject;
}

try{
try {
log.DEBUG("Parsed Feed Keys", Object.keys(parsedFeed), parsedFeed?.title);
if (parsedFeed?.items){
for (const post of parsedFeed.items){
recordPromiseArray.push(new Promise((recordResolve, recordReject) => {
log.DEBUG("Parsed Source Keys", Object.keys(post), post?.title);
//log.VERBOSE("Post from feed: ", post);
if (post.title && post.link && post.content && ( post.postId || post.guid || post.id ) && post.pubDate){
post.postId = post.postId ?? post.guid ?? post.id;
postStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
log.VERBOSE("Post from feed: ", post);
if (!post.title || !post.link || !post.pubDate) return recordReject("Missing information from the post");
if (!post.content || !post['content:encoded']) log.WARN("There is no content for post: ", post.title);

post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate);
tempPostStorage.getRecordBy('post_guid', post.postId, (err, existingRecord) => {
if (err) throw err;

log.DEBUG("Existing post record: ", existingRecord);
if (!existingRecord){
if (existingRecord) return recordResolve("Existing record found for this post");

const channel = client.channels.cache.get(source.channel_id);
libUtils.sendPost(post, source, channel, (err, sendResults) =>{
if (err) throw err;

if (sendResults){
if (!sendResults) {
log.ERROR("No sending results from sending a post: ", sendResults, existingRecord, post);
return recordReject("No sending results from sending a post");
}

log.DEBUG("Saving post to database: ", sendResults, post.title, source.channel_id);

postStorage.savePost(post, (err, saveResults) => {
tempPostStorage.savePost(post, (err, saveResults) => {
if(err) throw err;

if (saveResults) {
log.DEBUG("Saved results: ", saveResults);
return;
return recordResolve("Saved results", saveResults);
}
});
}
})
}
})
}))
}
}
}
}catch (err) {
catch (err) {
log.ERROR("Error Parsing Feed: ", source.link, err);
throw err;
}
Promise.all(recordPromiseArray).then((values) => {
log.DEBUG("All posts finished for: ", source.title, values);
return resolve(source.title);
});
});
}))
}

// Wait for all connections to finish then close the temp connections

Promise.all(sourcePromiseArray).then((values) => {
log.DEBUG("All sources finished, closing temp connections: ", values);
tempConnection.end();
});
});
}
libStorage.js
@@ -16,7 +16,7 @@ const accountsTable = process.env.DB_ACCOUNTS_TABLE;
const transactionsTable = process.env.DB_TRANSACTIONS_TABLE;
const pricingTable = process.env.DB_PRICING_TABLE;

var connection = mysql.createPool({
var Connection = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASS,
@@ -26,14 +26,15 @@ var connection = mysql.createPool({

// Helper Functions
// Function to run and handle SQL errors
function runSQL(sqlQuery, callback = (err, rows) => {
function runSQL(sqlQuery, connection, callback = (err, rows) => {
log.ERROR(err);
throw err;
}) {
// Start the MySQL Connection
if (!connection) connection = Connection;
connection.query(sqlQuery, (err, rows) => {
if (err) {
log.ERROR("SQL Error:", err)
log.ERROR("SQL Error on query:", sqlQuery, err);
return callback(err, undefined);
}
log.VERBOSE(`SQL result for query '${sqlQuery}':`, rows);
@@ -51,13 +52,14 @@ function returnMysqlTime(){
}

class Storage {
constructor(_dbTable) {
constructor(_dbTable, _connection) {
this.dbTable = _dbTable;
this.connection = _connection;
this.validKeys = [];

var sqlQuery = `SHOW COLUMNS FROM ${this.dbTable};`;

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return log.ERROR("Error getting column names: ", err);
if (rows){
for (const validKey of rows){
@@ -96,7 +98,7 @@ class Storage {

const sqlQuery = `SELECT * FROM ${this.dbTable} WHERE ${key} = "${keyValue}"`;

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (rows[0]?.[key]) return callback(undefined, rows[0]);
else return callback(undefined, false);
@@ -113,7 +115,7 @@ class Storage {

let records = [];

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
for (const row of rows) {
if (this.dbTable == rssFeedsTable){
@@ -139,7 +141,7 @@ class Storage {

let records = [];

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
for (const row of rows) {
if (this.dbTable == rssFeedsTable){
@@ -153,11 +155,21 @@ class Storage {
return callback(undefined, records);
});
}

closeConnection() {
try {
this.connection.end();
}
catch (err) {
log.ERROR("Error closing connection :", this.connection, err);
throw err;
}
}
}

exports.UserStorage = class UserStorage extends Storage {
constructor() {
super(accountsTable);
constructor(connection = undefined) {
super(accountsTable, connection);
}

/**
@@ -171,7 +183,7 @@ exports.UserStorage = class UserStorage extends Storage {

log.DEBUG(`Adding new entry with SQL query: '${sqlQuery}'`)

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (rows?.affectedRows > 0) return callback(undefined, rows);
return callback(undefined, undefined);
@@ -231,7 +243,7 @@ exports.UserStorage = class UserStorage extends Storage {

log.DEBUG("Updating Balance with SQL Query: ", sqlQuery);

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (!rows?.affectedRows > 0) return callback(new Error("Error updating Balance", rows), undefined);
return callback(undefined, rows);
@@ -240,8 +252,8 @@ exports.UserStorage = class UserStorage extends Storage {
}

exports.TransactionStorage = class TransactionStorage extends Storage {
constructor() {
super(transactionsTable);
constructor(connection = undefined) {
super(transactionsTable, connection);
}

createTransaction(transaction, callback){
@@ -249,7 +261,7 @@ exports.TransactionStorage = class TransactionStorage extends Storage {

log.DEBUG(`Adding new entry with SQL query: '${sqlQuery}'`)

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
if (rows?.affectedRows > 0) return callback(undefined, rows);
return callback(undefined, undefined);
@@ -258,8 +270,8 @@ exports.TransactionStorage = class TransactionStorage extends Storage {
}

exports.FeedStorage = class FeedStorage extends Storage {
constructor() {
super(rssFeedsTable);
constructor(connection = undefined) {
super(rssFeedsTable, connection);
}

/**
@@ -316,7 +328,7 @@ exports.FeedStorage = class FeedStorage extends Storage {

log.DEBUG(`Adding new entry with SQL query: '${sqlQuery}'`)

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows);
})
@@ -355,7 +367,7 @@ exports.FeedStorage = class FeedStorage extends Storage {

log.DEBUG(`Updating entry with SQL query: '${sqlQuery}'`)

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows);
})
@@ -373,7 +385,7 @@ exports.FeedStorage = class FeedStorage extends Storage {

const sqlQuery = `DELETE FROM ${this.dbTable} WHERE id = "${id}";`;

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows[0]);
})
@@ -439,8 +451,8 @@ exports.FeedStorage = class FeedStorage extends Storage {
}

exports.PostStorage = class PostStorage extends Storage {
constructor() {
super(rssPostsTable);
constructor(connection = undefined) {
super(rssPostsTable, connection);
}

savePost(_postObject, callback){
@@ -456,7 +468,7 @@ exports.PostStorage = class PostStorage extends Storage {

log.DEBUG(`Adding new post with SQL query: '${sqlQuery}'`)

runSQL(sqlQuery, (err, rows) => {
runSQL(sqlQuery, this.connection, (err, rows) => {
if (err) return callback(err, undefined);
return callback(undefined, rows);
})
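The Storage constructors and runSQL now take an optional connection, so callers can run a batch of queries on their own short-lived pool instead of the module-level one. A minimal sketch of the pattern this enables, mirroring the libCore.js change in this same diff (the pool options are assumed to come from the same environment variables used elsewhere in the repo):

```js
const mysql = require("mysql");
const { FeedStorage, PostStorage } = require("./libStorage");

// Short-lived pool shared by several storage instances for one update pass.
const tempConnection = mysql.createPool({
    host: process.env.DB_HOST,
    user: process.env.DB_USER,
    password: process.env.DB_PASS,
    database: process.env.DB_NAME,
    connectionLimit: 10
});

const tempFeedStorage = new FeedStorage(tempConnection); // queries run on tempConnection
const tempPostStorage = new PostStorage(tempConnection);
const defaultFeedStorage = new FeedStorage();            // no argument: falls back to the module-level pool

// ...run the feed update, then release the temporary pool.
tempConnection.end();
```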
30 libUtils.js
@@ -3,8 +3,10 @@ const { DebugBuilder } = require("./utilities/debugBuilder");
const log = new DebugBuilder("server", "libUtils");
const { NodeHtmlMarkdown } = require('node-html-markdown');
const { parse } = require("node-html-parser");
const crypto = require("crypto");

const imageRegex = /(http(s?):)([/|.|\w|\s|-])*((\.(?:jpg|gif|png|webm))|(\/gallery\/(?:[/|.|\w|\s|-])*))/g;
const youtubeVideoRegex = /((?:https?:)?\/\/)?((?:www|m)\.)?((?:youtube(-nocookie)?\.com|youtu.be))(\/(?:[\w\-]+\?v=|embed\/|v\/)?)([\w\-]+)/g

exports.EmmeliaEmbedBuilder = class PostEmbedBuilder extends EmbedBuilder {
constructor() {
@@ -74,10 +76,24 @@ exports.onError = (error) => {

exports.sendPost = (post, source, channel, callback) => {
log.DEBUG("Sending post from source: ", post, source);
post.content = parse(post.content);
// Reset the content parameter with the encoded parameter
post.content = parse(post['content:encoded'] ?? post.content);
const postTitle = post.title;
const postLink = post.link;
const postContent = NodeHtmlMarkdown.translate(post.content.text);
// Get the post content and trim it to length or add a placeholder if necessary
var postText = String(post.content.text);
if (postText.length >= 300) postText = `${postText.slice(0, 300).substring(0, Math.min(String(post.content.text).length, String(post.content.text).lastIndexOf(" ")))}...`;
else if (postText.length === 0) postText = `*This post has no content* [Direct Link](${post.link})`;
var postContent = postText;
// Check for embedded youtube videos and add the first four as links
const ytVideos = String(post.content).match(youtubeVideoRegex);
if (ytVideos) {
for (const ytVideo of ytVideos.slice(0,4)){
// If the video is an embed, replace the embed to make it watchable
if (ytVideo.includes("embed")) ytVideo = ytVideo.replace("embed/", "watch?v=");
postContent += `\nEmbeded Video from Post: [YouTube](${ytVideo})`
}
}
log.DEBUG("Post content: ", postContent);

const postId = post.postId;
@@ -90,6 +106,8 @@ exports.sendPost = (post, source, channel, callback) => {
if (linksInPost) {
log.DEBUG("Found links in post:", linksInPost);
for (const link of linksInPost) {
// Check to see if this link is a youtube video that was already found, if so skip it
if (ytVideos?.includes(link)) continue;
const images = String(link.getAttribute("href")).match(imageRegex);
log.DEBUG("Images found in post:", images);
if (images) {
@@ -99,13 +117,13 @@ exports.sendPost = (post, source, channel, callback) => {
}
}

log.DEBUG("Sending an RSS post to discord", postTitle, postId)
log.DEBUG("Sending an RSS post to discord", postTitle, postId, postContent)
try{
const rssMessage = new this.EmmeliaEmbedBuilder()
.setColor(0x0099FF)
.setTitle(postTitle)
.setURL(postLink)
.addFields({ name: "Post Content", value: postContent.slice(0,1024), inline: false })
.addFields({ name: "Post Content", value: postContent, inline: false })
.addFields({ name: 'Published', value: postPubDate, inline: true })
.addFields({ name: 'Source', value: postSourceLink, inline: true });

@@ -126,3 +144,7 @@ exports.sendPost = (post, source, channel, callback) => {
return callback(err, undefined);
}
}

exports.returnHash = (...stringsIncluded) => {
return crypto.createHash('sha1').update(`${stringsIncluded.join("-<<??//\\\\??>>-")}`).digest("base64");
}
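The new returnHash helper gives updateFeeds a deterministic fallback ID for feed items that ship without a guid. A small usage sketch (the post object here is hypothetical; the fallback chain matches the libCore.js change in this diff):

```js
const libUtils = require("./libUtils");

// Hypothetical RSS item with no guid/id field.
const post = {
    title: "Example Post",
    link: "https://example.com/example-post",
    pubDate: "Wed, 01 Mar 2023 12:00:00 GMT"
};

// Same fallback chain used in libCore.js: prefer the feed's own ID, otherwise hash the stable fields.
post.postId = post.postId ?? post.guid ?? post.id ?? libUtils.returnHash(post.title, post.link, post.pubDate);

// returnHash joins its arguments with a fixed separator and returns a base64-encoded SHA-1 digest,
// so the same title/link/pubDate always produce the same post_guid for duplicate detection.
```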
25 package.json
@@ -8,26 +8,31 @@
"@discordjs/rest": "~1.5.0",
"axios": "~1.3.4",
"chatgpt": "~4.7.2",
"cookie-parser": "~1.4.4",
"debug": "~2.6.9",
"discord-api-types": "~0.37.35",
"discord.js": "~14.7.1",
"dotenv": "~16.0.3",
"ejs": "~2.6.1",
"express": "~4.18.2",
"fs": "~0.0.1-security",
"gpt-3-encoder": "~1.1.4",
"http-errors": "~1.6.3",
"jsdoc": "~3.6.7",
"jsonfile": "~6.1.0",
"morgan": "~1.9.1",
"mysql": "~2.18.1",
"node-html-markdown": "~1.3.0",
"node-html-parser": "~6.1.5",
"openai": "~3.1.0",
"parse-files": "~0.1.1",
"rss-parser": "~3.12.0",
"mysql": "~2.18.1",
"cookie-parser": "~1.4.4",
"debug": "~2.6.9",
"ejs": "~2.6.1",
"http-errors": "~1.6.3",
"morgan": "~1.9.1",
"node-html-markdown": "~1.3.0",
"node-html-parser": "~6.1.5",
"gpt-3-encoder": "~1.1.4",
"user-agents": "~1.0.1303"
"user-agents": "~1.0.1303",
"@improbable-eng/grpc-web": "~0.15.0",
"@improbable-eng/grpc-web-node-http-transport": "~0.15.0",
"google-protobuf": "~3.21.2",
"grpc": "~1.24.11",
"typescript": "~4.9.5"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
263 utilities/stabilityHelpers.js Normal file
@@ -0,0 +1,263 @@
// Debug
const { DebugBuilder } = require("../utilities/debugBuilder");
const log = new DebugBuilder("server", "stabilityController");

// Modules
const Generation = require("../controllers/stabilityAi/generation_pb");
const { GenerationServiceClient } = require("../controllers/stabilityAi/generation_pb_service");
const fs = require("fs");

const { grpc } = require("@improbable-eng/grpc-web");
const GRPCWeb = grpc;

const path = require("node:path");

function isImageArtifact(artifact) {
log.DEBUG("Checking if image is an artifact", artifact);
if (artifact.getType() !== Generation.ArtifactType.ARTIFACT_IMAGE) return false;
if (artifact.getFinishReason() !== Generation.FinishReason.NULL) return false;
return artifact.hasBinary();
}
exports.isImageArtifact = isImageArtifact;

function isNSFWFilteredArtifact(artifact) {
if (artifact.getType() !== Generation.ArtifactType.ARTIFACT_IMAGE) return false;
if (artifact.getFinishReason() !== Generation.FinishReason.FILTER) return false;
return true;
}
exports.isNSFWFilteredArtifact = isNSFWFilteredArtifact;

/** Builds a generation request for a specified engine with the specified parameters. */
function buildGenerationRequest(engineID, params) {
if (params.type === "upscaling") {
const request = new Generation.Request()
request.setEngineId(engineID)
request.setRequestedType(Generation.ArtifactType.ARTIFACT_IMAGE)
request.setClassifier(new Generation.ClassifierParameters())

const imageParams = new Generation.ImageParameters()
if ("width" in params && !!params.width) {
imageParams.setWidth(params.width)
} else if ("height" in params && !!params.height) {
imageParams.setHeight(params.height)
}
request.setImage(imageParams)
request.addPrompt(createInitImagePrompt(params.initImage))

return request
}

const imageParams = new Generation.ImageParameters()
if (params.type === "text-to-image") {
params.width && imageParams.setWidth(params.width)
params.height && imageParams.setHeight(params.height)
}

// Set the number of images to generate (Default 1)
params.samples && imageParams.setSamples(params.samples)

// Set the steps (Default 30)
// Represents the amount of inference steps performed on image generation.
params.steps && imageParams.setSteps(params.steps)

// Set the seed (Default 0)
// Including a seed will cause the results to be deterministic.
// Omitting the seed or setting it to `0` will do the opposite.
params.seed && imageParams.addSeed(params.seed)

// Set the sampler (Default 'automatic')
// Omitting this value enables 'automatic' mode where we choose the best sampler for you based
// on the current payload. For example, since CLIP guidance only works on ancestral samplers,
// when CLIP guidance is enabled, we will automatically choose an ancestral sampler for you.
if (params.sampler) {
const transformType = new Generation.TransformType()
transformType.setDiffusion(params.sampler)
imageParams.setTransform(transformType)
}

// Set the Engine
// At the time of writing, valid engines are:
// stable-diffusion-v1,
// stable-diffusion-v1-5
// stable-diffusion-512-v2-0
// stable-diffusion-768-v2-0
// stable-diffusion-512-v2-1
// stable-diffusion-768-v2-1
// stable-inpainting-v1-0
// stable-inpainting-512-v2-0
// esrgan-v1-x2plus
const request = new Generation.Request()
request.setEngineId(engineID)
request.setRequestedType(Generation.ArtifactType.ARTIFACT_IMAGE)
request.setClassifier(new Generation.ClassifierParameters())

// Set the CFG scale (Default 7)
// Influences how strongly your generation is guided to match your prompt. Higher values match closer.
const samplerParams = new Generation.SamplerParameters()
params.cfgScale && samplerParams.setCfgScale(params.cfgScale)

const stepParams = new Generation.StepParameter()
stepParams.setScaledStep(0)
stepParams.setSampler(samplerParams)

const scheduleParams = new Generation.ScheduleParameters()
if (params.type === "image-to-image") {
// If we're doing image-to-image generation then we need to configure
// how much influence the initial image has on the diffusion process
scheduleParams.setStart(params.stepScheduleStart)
if (params.stepScheduleEnd) {
scheduleParams.setEnd(params.stepScheduleEnd)
}
} else if (params.type === "image-to-image-masking") {
// Step schedule start is always 1 for masking requests
scheduleParams.setStart(1)
}

stepParams.setSchedule(scheduleParams)

// Set CLIP Guidance (Default: None)
// NOTE: This only works with ancestral samplers. Omitting the sampler parameter above will ensure
// that we automatically choose an ancestral sampler for you when CLIP guidance is enabled.
if (params.clipGuidancePreset) {
const guidanceParameters = new Generation.GuidanceParameters()
guidanceParameters.setGuidancePreset(params.clipGuidancePreset)
stepParams.setGuidance(guidanceParameters)
}

imageParams.addParameters(stepParams)
request.setImage(imageParams)

params.prompts.forEach(textPrompt => {
const prompt = new Generation.Prompt()
prompt.setText(textPrompt.text)

// If provided, set the prompt's weight (use negative values for negative weighting)
if (textPrompt.weight) {
const promptParameters = new Generation.PromptParameters()
promptParameters.setWeight(textPrompt.weight)
prompt.setParameters(promptParameters)
}

request.addPrompt(prompt)
})

// Add image prompts if we're doing some kind of image-to-image generation or upscaling
if (params.type === "image-to-image") {
request.addPrompt(createInitImagePrompt(params.initImage))
} else if (params.type === "image-to-image-masking") {
request.addPrompt(createInitImagePrompt(params.initImage))
request.addPrompt(createMaskImagePrompt(params.maskImage))
}

return request
}
exports.buildGenerationRequest = buildGenerationRequest;

function createInitImagePrompt(imageBinary) {
const initImageArtifact = new Generation.Artifact()
initImageArtifact.setBinary(imageBinary)
initImageArtifact.setType(Generation.ArtifactType.ARTIFACT_IMAGE)

const initImageParameters = new Generation.PromptParameters()
initImageParameters.setInit(true)

const initImagePrompt = new Generation.Prompt()
initImagePrompt.setParameters(initImageParameters)
initImagePrompt.setArtifact(initImageArtifact)

return initImagePrompt
}

function createMaskImagePrompt(imageBinary) {
const maskImageArtifact = new Generation.Artifact()
maskImageArtifact.setBinary(imageBinary)
maskImageArtifact.setType(Generation.ArtifactType.ARTIFACT_MASK)

const maskImagePrompt = new Generation.Prompt()
maskImagePrompt.setArtifact(maskImageArtifact)

return maskImagePrompt
}

/** Executes a GenerationRequest, abstracting the gRPC streaming result behind a Promise */
async function executeGenerationRequest(
generationClient,
request,
metadata
) {
try {
const stream = generationClient.generate(request, metadata)
const answers = await new Promise((resolve, reject) => {
const answers = new Array()

stream.on("data", data => answers.push(data))
stream.on("end", () => resolve(answers))
stream.on("status", status => {
if (status.code === 0) return
reject(status.details)
})
})

return extractArtifacts(answers)
} catch (err) {
return err instanceof Error ? err : new Error(JSON.stringify(err))
}
}
exports.executeGenerationRequest = executeGenerationRequest;

function extractArtifacts(answers) {
const imageArtifacts = new Array()
const filteredArtifacts = new Array()

for (const answer of answers) {
for (const artifact of answer.getArtifactsList()) {
if (isImageArtifact(artifact)) {
imageArtifacts.push(artifact)
} else if (isNSFWFilteredArtifact(artifact)) {
filteredArtifacts.push(artifact)
}
}
}

return { filteredArtifacts, imageArtifacts }
}

/** Generation completion handler - replace this with your own logic */
function onGenerationComplete(response) {
if (response instanceof Error) {
log.ERROR("Generation failed", response)
throw response
}

log.DEBUG(
`${response.imageArtifacts.length} image${
response.imageArtifacts.length > 1 ? "s" : ""
} were successfully generated.`
)

// Do something with NSFW filtered artifacts
if (response.filteredArtifacts.length > 0) {
log.DEBUG(
`${response.filteredArtifacts.length} artifact` +
`${response.filteredArtifacts.length > 1 ? "s" : ""}` +
` were filtered by the NSFW classifier and need to be retried.`
)
}

// Do something with the successful image artifacts
response.imageArtifacts.forEach(artifact => {
try {
const writePath =
fs.writeFileSync(
path.resolve(__dirname, `../.generations/image-${artifact.getSeed()}.png`),
Buffer.from(artifact.getBinary_asU8())
)
} catch (error) {
log.ERROR("Failed to write resulting image to disk", error)
}
})

// For browser implementations: you could use the `artifact.getBinary_asB64()` method to get a
// base64 encoded string and then create a data URL from that and display it in an <img> tag.
}
exports.onGenerationComplete = onGenerationComplete;