From 2cd5eee94052f21cfe6d9a1db2ad0664e901e433 Mon Sep 17 00:00:00 2001
From: Logan Cusano
Date: Sun, 14 Jul 2024 15:47:46 -0400
Subject: [PATCH] Implement OpenAI Assistant API

- Updated linkCop
- Updated standard interaction handler
---
 discordBot/addons/gptInteraction.mjs |   6 +-
 discordBot/addons/linkCop.mjs        |  10 +--
 discordBot/modules/gptHandler.mjs    | 113 +++++++++++++++++++++++----
 3 files changed, 106 insertions(+), 23 deletions(-)

diff --git a/discordBot/addons/gptInteraction.mjs b/discordBot/addons/gptInteraction.mjs
index f145703..db14521 100644
--- a/discordBot/addons/gptInteraction.mjs
+++ b/discordBot/addons/gptInteraction.mjs
@@ -22,7 +22,7 @@ export const gptInteraction = async (nodeIo, message) => {
     if (msg.author.id === nodeIo.serverClient.user.id) {
       conversation.push({
         role: 'assistant',
-        name: msg.author.id,
+        //name: msg.author.id,
         content: msg.content,
       });
 
@@ -31,13 +31,13 @@ export const gptInteraction = async (nodeIo, message) => {
 
     conversation.push({
       role: 'user',
-      name: msg.author.id,
+      //name: msg.author.id,
       content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
     });
   });
   const response = await gptHandler(conversation);
   if (response) {
-    const responseMessage = response.choices[0].message.content;
+    const responseMessage = response;
     const chunkSize = 2500;
 
     for (let i = 0; i < responseMessage.length; i += chunkSize) {
diff --git a/discordBot/addons/linkCop.mjs b/discordBot/addons/linkCop.mjs
index dc97f11..3cfcc48 100644
--- a/discordBot/addons/linkCop.mjs
+++ b/discordBot/addons/linkCop.mjs
@@ -35,7 +35,7 @@ export const linkCop = async (nodeIo, message) => {
     if (msg.author.id === nodeIo.serverClient.user.id) {
       conversation.push({
         role: 'assistant',
-        name: msg.author.id,
+        //name: msg.author.id,
         content: msg.content,
       });
 
@@ -44,20 +44,20 @@ export const linkCop = async (nodeIo, message) => {
 
     conversation.push({
       role: 'user',
-      name: msg.author.id,
+      //name: msg.author.id,
       content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
     });
   });
 
   conversation.push({
-    role: 'system',
-    content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You should let the user know.`
+    role: 'assistant',
+    content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. 
+You are replying to the message with the link to let the user know.`
   });
 
   const response = await gptHandler(conversation);
   if (response) {
-    const responseMessage = response.choices[0].message.content;
+    const responseMessage = response;
     const chunkSize = 2000;
 
     for (let i = 0; i < responseMessage.length; i += chunkSize) {
diff --git a/discordBot/modules/gptHandler.mjs b/discordBot/modules/gptHandler.mjs
index ad2bf09..ddf3e5f 100644
--- a/discordBot/modules/gptHandler.mjs
+++ b/discordBot/modules/gptHandler.mjs
@@ -4,36 +4,119 @@ import dotenv from 'dotenv';
 dotenv.config();
 
 import { OpenAI } from 'openai';
+import { EventEmitter } from 'events';
 
 const openai = new OpenAI(process.env.OPENAI_API_KEY);
 
-let conversation = [];
-
-conversation.push({
-  role: 'system',
-  content: process.env.DRB_SERVER_INITIAL_PROMPT
+const assistant = await openai.beta.assistants.create({
+  name: "Emmelia",
+  instructions: process.env.DRB_SERVER_INITIAL_PROMPT,
+  model: "gpt-4o",
 });
 
+class EventHandler extends EventEmitter {
+  constructor(client) {
+    super();
+    this.client = client;
+  }
+
+  async onEvent(event) {
+    try {
+      console.log(event);
+      // Retrieve events that are denoted with 'requires_action'
+      // since these will have our tool_calls
+      if (event.event === "thread.run.requires_action") {
+        await this.handleRequiresAction(
+          event.data,
+          event.data.id,
+          event.data.thread_id,
+        );
+      }
+    } catch (error) {
+      console.error("Error handling event:", error);
+    }
+  }
+
+  async handleRequiresAction(data, runId, threadId) {
+    try {
+      const toolOutputs =
+        data.required_action.submit_tool_outputs.tool_calls.map((toolCall) => {
+          // Call the function
+          switch (toolCall.function.name) {
+            case "getCurrentTemperature": return {
+              tool_call_id: toolCall.id,
+              output: "57",
+            };
+          }
+        });
+      // Submit all the tool outputs at the same time
+      await this.submitToolOutputs(toolOutputs, runId, threadId);
+    } catch (error) {
+      console.error("Error processing required action:", error);
+    }
+  }
+
+  async submitToolOutputs(toolOutputs, runId, threadId) {
+    try {
+      // Use the submitToolOutputsStream helper
+      const stream = this.client.beta.threads.runs.submitToolOutputsStream(
+        threadId,
+        runId,
+        { tool_outputs: toolOutputs },
+      );
+      for await (const event of stream) {
+        this.emit("event", event);
+      }
+    } catch (error) {
+      console.error("Error submitting tool outputs:", error);
+    }
+  }
+}
+
+const eventHandler = new EventHandler(openai);
+eventHandler.on("event", eventHandler.onEvent.bind(eventHandler));
+
 export const gptHandler = async (additionalMessages) => {
+  const thread = await openai.beta.threads.create();
+
   // Add the additional messages to the conversation
-  conversation = conversation.concat(additionalMessages);
-  log.DEBUG("AI Conversation:", conversation);
+  for (const msgObj of additionalMessages) {
+    await openai.beta.threads.messages.create(
+      thread.id,
+      msgObj
+    );
+  }
+
+  log.DEBUG("AI Conversation:", thread);
+
+  // Run the thread to get a response
   try {
-    const response = await openai.chat.completions.create({
-      model: 'gpt-3.5-turbo',
-      messages: conversation,
-    }).catch((error) => log.ERROR("OpenAI Error: ", error));
+    const stream = await openai.beta.threads.runs.stream(
+      thread.id,
+      { assistant_id: assistant.id },
+      eventHandler,
+    );
+
+    for await (const event of stream) {
+      eventHandler.emit("event", event);
+    }
+
+    let response;
+    const messages = await openai.beta.threads.messages.list(
+      thread.id
+    );
+    response = messages.data[0].content[0].text.value;
 
     log.DEBUG("AI Response:", response);
-    if (!response) {
+    if (!response) {
       return false;
     }
-    return response
+    return response;
   } catch (error) {
     console.error('Error generating response:', error);
-    return false;
+    return false;
   }
-}
\ No newline at end of file
+}