Compare commits

2 Commits: 24296c2ae4 ... f29459aadb

| Author | SHA1 | Date |
|---|---|---|
|  | f29459aadb |  |
|  | 2cd5eee940 |  |
```diff
@@ -22,7 +22,7 @@ export const gptInteraction = async (nodeIo, message) => {
         if (msg.author.id === nodeIo.serverClient.user.id) {
             conversation.push({
                 role: 'assistant',
-                name: msg.author.id,
+                //name: msg.author.id,
                 content: msg.content,
             });

@@ -31,13 +31,13 @@ export const gptInteraction = async (nodeIo, message) => {

             conversation.push({
                 role: 'user',
-                name: msg.author.id,
+                //name: msg.author.id,
                 content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
             });
         });
         const response = await gptHandler(conversation);
         if (response) {
-            const responseMessage = response.choices[0].message.content;
+            const responseMessage = response;
             const chunkSize = 2500;

             for (let i = 0; i < responseMessage.length; i += chunkSize) {
```
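Note: the `const responseMessage = response;` change relies on the reworked `gptHandler` further down in this compare, which now returns the reply text directly instead of a chat-completions object. The visible hunk ends inside the chunking loop; the sketch below is a hypothetical illustration of how such a loop slices a long reply to stay under Discord's per-message length limit — `sendInChunks` and the `message.reply` call are assumptions, not code from this diff.

```js
// Hypothetical sketch only, not part of this commit: slice a long reply into
// chunkSize pieces and send each piece as a separate Discord reply.
async function sendInChunks(message, responseMessage, chunkSize) {
    for (let i = 0; i < responseMessage.length; i += chunkSize) {
        await message.reply(responseMessage.slice(i, i + chunkSize));
    }
}
```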
```diff
@@ -35,7 +35,7 @@ export const linkCop = async (nodeIo, message) => {
         if (msg.author.id === nodeIo.serverClient.user.id) {
             conversation.push({
                 role: 'assistant',
-                name: msg.author.id,
+                //name: msg.author.id,
                 content: msg.content,
             });

@@ -44,20 +44,20 @@ export const linkCop = async (nodeIo, message) => {

             conversation.push({
                 role: 'user',
-                name: msg.author.id,
+                //name: msg.author.id,
                 content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
             });
         });

     conversation.push({
-        role: 'system',
-        content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You should let the user know.`
+        role: 'assistant',
+        content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You are replying to the message with the link to let the user know.`
     });

     const response = await gptHandler(conversation);

     if (response) {
-        const responseMessage = response.choices[0].message.content;
+        const responseMessage = response;
         const chunkSize = 2000;

         for (let i = 0; i < responseMessage.length; i += chunkSize) {
```
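Note: switching the final instruction from `role: 'system'` to `role: 'assistant'` lines up with the handler now writing these messages into an Assistants API thread, where thread messages only carry user/assistant roles. A hypothetical example of the conversation array `linkCop` now hands to `gptHandler` (the message text is illustrative; the commented-out `name` field is simply absent):

```js
// Illustrative only; not code from this commit.
const conversation = [
    { role: 'assistant', content: 'An earlier bot message from the channel' },
    { role: 'user', content: 'An earlier user message, with the bot mention stripped' },
    { role: 'assistant', content: "There has been a link posted to a channel that links are not allowed in. ..." },
];
// const response = await gptHandler(conversation);
```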
discordBot/commands/connections.mjs (new file, 52 lines)

```js
import { DebugBuilder } from "../../modules/debugger.mjs";
const log = new DebugBuilder("server", "discordBot.command.ping");
import { SlashCommandBuilder } from 'discord.js';

// Exporting data property that contains the command structure for discord including any params
export const data = new SlashCommandBuilder()
    .setName('connections')
    .setDescription('Check to see what bots are online.');

// Exporting other properties
export const example = "/connections"; // An example of how the command would be run in discord chat, this will be used for the help command
export const deferInitialReply = false; // If we the initial reply in discord should be deferred. This gives extra time to respond, however the method of replying is different.

/**
 * Function to give the user auto-reply suggestions
 * @param {any} nodeIo The nodeIO server for manipulation of sockets
 * @param {any} interaction The interaction object
 */
/*
export async function autocomplete(nodeIo, interaction) {
    const focusedValue = interaction.options.getFocused();
    const choices = []; // The array to be filled with the autocorrect values
    const filtered = choices.filter(choice => choice.name.startsWith(focusedValue));
    log.INFO(focusedValue, choices, filtered);
    await interaction.respond(filtered.map(choice => ({name: choice.name, value: choice.name})));
}
*/

/**
 * The function to run when the command is called by a discord user
 * @param {any} nodeIo The nodeIO server for manipulation of sockets
 * @param {any} interaction The interaction object
 */
export const execute = async (nodeIo, interaction) => {
    try {
        const sockets = await nodeIo.allSockets();
        log.DEBUG("All open sockets: ",sockets);
        let socketMessage = "";

        // Create the message for discord with each socket on a new line
        sockets.forEach(socket => {
            socketMessage += `\n${socket}`
        });

        await interaction.reply(`**Online Sockets: '${socketMessage}'**`);
        //await interaction.reply('**Pong.**');
        //await interaction.channel.send('**Pong.**');
    } catch (err) {
        console.error(err);
        // await interaction.reply(err.toString());
    }
}
```
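Note: the module follows the repo's command convention of exporting `data`, `example`, `deferInitialReply`, and `execute`. The sketch below is a hypothetical illustration of how such a module is typically picked up by a command loader and dispatched on an interaction; the loader itself is not part of this diff and the names used here are assumptions.

```js
// Hypothetical loader sketch, not code from this repository.
import * as connections from './discordBot/commands/connections.mjs';

const commands = new Map();
commands.set(connections.data.name, connections); // keyed by 'connections'

// On an interactionCreate event (nodeIo and interaction supplied by the bot):
async function dispatch(nodeIo, interaction) {
    const command = commands.get(interaction.commandName);
    if (command) await command.execute(nodeIo, interaction);
}
```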
```diff
@@ -4,25 +4,108 @@ import dotenv from 'dotenv';
 dotenv.config();

 import { OpenAI } from 'openai';
+import { EventEmitter } from 'events';

 const openai = new OpenAI(process.env.OPENAI_API_KEY);

-let conversation = [];
-conversation.push({
-    role: 'system',
-    content: process.env.DRB_SERVER_INITIAL_PROMPT
+const assistant = await openai.beta.assistants.create({
+    name: "Emmelia",
+    instructions: process.env.DRB_SERVER_INITIAL_PROMPT,
+    model: "gpt-4o",
 });

+class EventHandler extends EventEmitter {
+    constructor(client) {
+        super();
+        this.client = client;
+    }
+
+    async onEvent(event) {
+        try {
+            console.log(event);
+            // Retrieve events that are denoted with 'requires_action'
+            // since these will have our tool_calls
+            if (event.event === "thread.run.requires_action") {
+                await this.handleRequiresAction(
+                    event.data,
+                    event.data.id,
+                    event.data.thread_id,
+                );
+            }
+        } catch (error) {
+            console.error("Error handling event:", error);
+        }
+    }
+
+    async handleRequiresAction(data, runId, threadId) {
+        try {
+            const toolOutputs =
+                data.required_action.submit_tool_outputs.tool_calls.map((toolCall) => {
+                    // Call the function
+                    switch (toolCall.function.name) {
+                        case "getCurrentTemperature": return {
+                            tool_call_id: toolCall.id,
+                            output: "57",
+                        };
+                    }
+                });
+            // Submit all the tool outputs at the same time
+            await this.submitToolOutputs(toolOutputs, runId, threadId);
+        } catch (error) {
+            console.error("Error processing required action:", error);
+        }
+    }
+
+    async submitToolOutputs(toolOutputs, runId, threadId) {
+        try {
+            // Use the submitToolOutputsStream helper
+            const stream = this.client.beta.threads.runs.submitToolOutputsStream(
+                threadId,
+                runId,
+                { tool_outputs: toolOutputs },
+            );
+            for await (const event of stream) {
+                this.emit("event", event);
+            }
+        } catch (error) {
+            console.error("Error submitting tool outputs:", error);
+        }
+    }
+}
+
+const eventHandler = new EventHandler(openai);
+eventHandler.on("event", eventHandler.onEvent.bind(eventHandler));
+
 export const gptHandler = async (additionalMessages) => {
+    const thread = await openai.beta.threads.create();
+
     // Add the additional messages to the conversation
-    conversation = conversation.concat(additionalMessages);
-    log.DEBUG("AI Conversation:", conversation);
+    for (const msgObj of additionalMessages) {
+        await openai.beta.threads.messages.create(
+            thread.id,
+            msgObj
+        );
+    }
+
+    log.DEBUG("AI Conversation:", thread);
+
+    // Run the thread to get a response
     try {
-        const response = await openai.chat.completions.create({
-            model: 'gpt-3.5-turbo',
-            messages: conversation,
-        }).catch((error) => log.ERROR("OpenAI Error: ", error));
+        const stream = await openai.beta.threads.runs.stream(
+            thread.id,
+            { assistant_id: assistant.id },
+            eventHandler,
+        );
+
+        for await (const event of stream) {
+            eventHandler.emit("event", event);
+        }
+
+        let response;
+        const messages = await openai.beta.threads.messages.list(
+            thread.id
+        );
+        response = messages.data[0].content[0].text.value;
+
         log.DEBUG("AI Response:", response);

@@ -30,7 +113,7 @@ export const gptHandler = async (additionalMessages) => {
             return false;
         }

-        return response
+        return response;

     } catch (error) {
         console.error('Error generating response:', error);
```
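Note: with this rework, `gptHandler` creates an Assistants API thread, copies the caller's messages into it, streams a run for the "Emmelia" assistant, and then resolves to the reply text itself (`messages.data[0].content[0].text.value`). That is why the callers earlier in this compare switched from `response.choices[0].message.content` to plain `response`. A minimal, hypothetical call for illustration — the import path is an assumption, as the handler's file name is not shown in this compare:

```js
// Illustrative only; adjust the import path to wherever gptHandler lives.
// import { gptHandler } from './discordBot/gptHandler.mjs';

const reply = await gptHandler([
    { role: 'user', content: 'Hello Emmelia!' },
]);

if (reply) {
    console.log(reply); // a plain string, or false when no response was produced
}
```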