Compare commits

..

2 Commits

Author SHA1 Message Date
Logan Cusano
f29459aadb Added new connections command for debug
Some checks failed
DRB Tests / drb_mocha_tests (push) Failing after 35s
release-tag / release-image (push) Failing after 35s
2024-07-14 16:50:48 -04:00
Logan Cusano
2cd5eee940 Implement OpenAI Assistant API
- Updated linkCop
- Updated standard interaction handler
2024-07-14 15:47:46 -04:00
4 changed files with 158 additions and 23 deletions

View File

@@ -22,7 +22,7 @@ export const gptInteraction = async (nodeIo, message) => {
if (msg.author.id === nodeIo.serverClient.user.id) { if (msg.author.id === nodeIo.serverClient.user.id) {
conversation.push({ conversation.push({
role: 'assistant', role: 'assistant',
name: msg.author.id, //name: msg.author.id,
content: msg.content, content: msg.content,
}); });
@@ -31,13 +31,13 @@ export const gptInteraction = async (nodeIo, message) => {
conversation.push({ conversation.push({
role: 'user', role: 'user',
name: msg.author.id, //name: msg.author.id,
content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''), content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
}); });
}); });
const response = await gptHandler(conversation); const response = await gptHandler(conversation);
if (response) { if (response) {
const responseMessage = response.choices[0].message.content; const responseMessage = response;
const chunkSize = 2500; const chunkSize = 2500;
for (let i = 0; i < responseMessage.length; i += chunkSize) { for (let i = 0; i < responseMessage.length; i += chunkSize) {

View File

@@ -35,7 +35,7 @@ export const linkCop = async (nodeIo, message) => {
if (msg.author.id === nodeIo.serverClient.user.id) { if (msg.author.id === nodeIo.serverClient.user.id) {
conversation.push({ conversation.push({
role: 'assistant', role: 'assistant',
name: msg.author.id, //name: msg.author.id,
content: msg.content, content: msg.content,
}); });
@@ -44,20 +44,20 @@ export const linkCop = async (nodeIo, message) => {
conversation.push({ conversation.push({
role: 'user', role: 'user',
name: msg.author.id, //name: msg.author.id,
content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''), content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
}); });
}); });
conversation.push({ conversation.push({
role: 'system', role: 'assistant',
content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You should let the user know.` content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You are replying to the message with the link to let the user know.`
}); });
const response = await gptHandler(conversation); const response = await gptHandler(conversation);
if (response) { if (response) {
const responseMessage = response.choices[0].message.content; const responseMessage = response;
const chunkSize = 2000; const chunkSize = 2000;
for (let i = 0; i < responseMessage.length; i += chunkSize) { for (let i = 0; i < responseMessage.length; i += chunkSize) {

View File

@@ -0,0 +1,52 @@
import { DebugBuilder } from "../../modules/debugger.mjs";
const log = new DebugBuilder("server", "discordBot.command.ping");
import { SlashCommandBuilder } from 'discord.js';
// The command structure handed to discord on registration, including any params.
const command = new SlashCommandBuilder();
command.setName('connections');
command.setDescription('Check to see what bots are online.');
export const data = command;

// An example of how the command would be run in discord chat; consumed by the help command.
export const example = "/connections";
// Whether the initial reply in discord should be deferred. Deferring buys extra
// time to respond, however the method of replying is different.
export const deferInitialReply = false;
/**
* Function to give the user auto-reply suggestions
* @param {any} nodeIo The nodeIO server for manipulation of sockets
* @param {any} interaction The interaction object
*/
/*
export async function autocomplete(nodeIo, interaction) {
const focusedValue = interaction.options.getFocused();
const choices = []; // The array to be filled with the autocorrect values
const filtered = choices.filter(choice => choice.name.startsWith(focusedValue));
log.INFO(focusedValue, choices, filtered);
await interaction.respond(filtered.map(choice => ({name: choice.name, value: choice.name})));
}
*/
/**
 * The function to run when the command is called by a discord user.
 * Replies with the ids of all sockets currently connected to the nodeIO server.
 * @param {any} nodeIo The nodeIO server for manipulation of sockets
 * @param {any} interaction The interaction object
 */
export const execute = async (nodeIo, interaction) => {
  try {
    // allSockets() resolves to the set of ids of every connected socket.
    const sockets = await nodeIo.allSockets();
    log.DEBUG("All open sockets: ", sockets);
    // One socket id per line; empty string when nothing is connected
    // (matches the previous forEach/+= accumulation exactly).
    const socketMessage = Array.from(sockets, (socket) => `\n${socket}`).join('');
    await interaction.reply(`**Online Sockets: '${socketMessage}'**`);
  } catch (err) {
    console.error(err);
    // Tell the user the command failed instead of leaving the interaction
    // hanging (discord would otherwise show "application did not respond").
    try {
      await interaction.reply(`**Error:** ${err.toString()}`);
    } catch (replyErr) {
      // Reply can itself fail (e.g. interaction already acknowledged); just log it.
      console.error(replyErr);
    }
  }
}

View File

@@ -4,25 +4,108 @@ import dotenv from 'dotenv';
dotenv.config(); dotenv.config();
import { OpenAI } from 'openai'; import { OpenAI } from 'openai';
import { EventEmitter } from 'events';
const openai = new OpenAI(process.env.OPENAI_API_KEY); const openai = new OpenAI(process.env.OPENAI_API_KEY);
let conversation = []; const assistant = await openai.beta.assistants.create({
name: "Emmelia",
conversation.push({ instructions: process.env.DRB_SERVER_INITIAL_PROMPT,
role: 'system', model: "gpt-4o",
content: process.env.DRB_SERVER_INITIAL_PROMPT
}); });
export const gptHandler = async (additionalMessages) => { class EventHandler extends EventEmitter {
// Add the additional messages to the conversation constructor(client) {
conversation = conversation.concat(additionalMessages); super();
log.DEBUG("AI Conversation:", conversation); this.client = client;
}
async onEvent(event) {
try { try {
const response = await openai.chat.completions.create({ console.log(event);
model: 'gpt-3.5-turbo', // Retrieve events that are denoted with 'requires_action'
messages: conversation, // since these will have our tool_calls
}).catch((error) => log.ERROR("OpenAI Error: ", error)); if (event.event === "thread.run.requires_action") {
await this.handleRequiresAction(
event.data,
event.data.id,
event.data.thread_id,
);
}
} catch (error) {
console.error("Error handling event:", error);
}
}
async handleRequiresAction(data, runId, threadId) {
try {
const toolOutputs =
data.required_action.submit_tool_outputs.tool_calls.map((toolCall) => {
// Call the function
switch (toolCall.function.name) {
case "getCurrentTemperature": return {
tool_call_id: toolCall.id,
output: "57",
};
}
});
// Submit all the tool outputs at the same time
await this.submitToolOutputs(toolOutputs, runId, threadId);
} catch (error) {
console.error("Error processing required action:", error);
}
}
async submitToolOutputs(toolOutputs, runId, threadId) {
try {
// Use the submitToolOutputsStream helper
const stream = this.client.beta.threads.runs.submitToolOutputsStream(
threadId,
runId,
{ tool_outputs: toolOutputs },
);
for await (const event of stream) {
this.emit("event", event);
}
} catch (error) {
console.error("Error submitting tool outputs:", error);
}
}
}
const eventHandler = new EventHandler(openai);
eventHandler.on("event", eventHandler.onEvent.bind(eventHandler));
export const gptHandler = async (additionalMessages) => {
const thread = await openai.beta.threads.create();
// Add the additional messages to the conversation
for (const msgObj of additionalMessages) {
await openai.beta.threads.messages.create(
thread.id,
msgObj
);
}
log.DEBUG("AI Conversation:", thread);
// Run the thread to get a response
try {
const stream = await openai.beta.threads.runs.stream(
thread.id,
{ assistant_id: assistant.id },
eventHandler,
);
for await (const event of stream) {
eventHandler.emit("event", event);
}
let response;
const messages = await openai.beta.threads.messages.list(
thread.id
);
response = messages.data[0].content[0].text.value;
log.DEBUG("AI Response:", response); log.DEBUG("AI Response:", response);
@@ -30,7 +113,7 @@ export const gptHandler = async (additionalMessages) => {
return false; return false;
} }
return response return response;
} catch (error) { } catch (error) {
console.error('Error generating response:', error); console.error('Error generating response:', error);