Implement OpenAI Assistant API

- Updated linkCop
- Updated standard interaction handler
This commit is contained in:
Logan Cusano
2024-07-14 15:47:46 -04:00
parent 24296c2ae4
commit 2cd5eee940
3 changed files with 106 additions and 23 deletions

View File

@@ -22,7 +22,7 @@ export const gptInteraction = async (nodeIo, message) => {
if (msg.author.id === nodeIo.serverClient.user.id) {
conversation.push({
role: 'assistant',
name: msg.author.id,
//name: msg.author.id,
content: msg.content,
});
@@ -31,13 +31,13 @@ export const gptInteraction = async (nodeIo, message) => {
conversation.push({
role: 'user',
name: msg.author.id,
//name: msg.author.id,
content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
});
});
const response = await gptHandler(conversation);
if (response) {
const responseMessage = response.choices[0].message.content;
const responseMessage = response;
const chunkSize = 2500;
for (let i = 0; i < responseMessage.length; i += chunkSize) {

View File

@@ -35,7 +35,7 @@ export const linkCop = async (nodeIo, message) => {
if (msg.author.id === nodeIo.serverClient.user.id) {
conversation.push({
role: 'assistant',
name: msg.author.id,
//name: msg.author.id,
content: msg.content,
});
@@ -44,20 +44,20 @@ export const linkCop = async (nodeIo, message) => {
conversation.push({
role: 'user',
name: msg.author.id,
//name: msg.author.id,
content: msg.content.replace(`<@${nodeIo.serverClient.user.id}>`, ''),
});
});
conversation.push({
role: 'system',
content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You should let the user know.`
role: 'assistant',
content: `There has been a link posted to a channel that links are not allowed in. The above messages are from the channel that links are not allowed including the message with the link. The message with the link is going to be deleted and moved to the '#links' channels. You are replying to the message with the link to let the user know.`
});
const response = await gptHandler(conversation);
if (response) {
const responseMessage = response.choices[0].message.content;
const responseMessage = response;
const chunkSize = 2000;
for (let i = 0; i < responseMessage.length; i += chunkSize) {

View File

@@ -4,36 +4,119 @@ import dotenv from 'dotenv';
dotenv.config();
import { OpenAI } from 'openai';
import { EventEmitter } from 'events';
// OpenAI SDK client. The v4 constructor expects an options object; passing the
// raw key string means no key is actually set (the SDK then silently falls
// back to the OPENAI_API_KEY env var, which only works by coincidence).
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Module-level Assistant created once at startup (top-level await, so this
// file must be loaded as an ES module).
// NOTE(review): this creates a brand-new assistant on every process boot —
// consider persisting and reusing an assistant id instead.
const assistant = await openai.beta.assistants.create({
    name: "Emmelia",
    instructions: process.env.DRB_SERVER_INITIAL_PROMPT,
    model: "gpt-4o",
});
/**
 * Bridges streamed OpenAI Assistant run events to local tool execution.
 *
 * Listens for `thread.run.requires_action` events, runs the requested tool
 * calls, and streams the outputs back to the run, re-emitting every resulting
 * event so follow-up actions are handled too.
 */
class EventHandler extends EventEmitter {
    /**
     * @param {object} client - OpenAI SDK client used to submit tool outputs.
     */
    constructor(client) {
        super();
        this.client = client;
    }

    /**
     * Handles one streamed Assistant event. Only `thread.run.requires_action`
     * triggers work (those events carry the tool_calls); everything else is
     * just logged. Errors are logged, never rethrown.
     * @param {{event: string, data: object}} event - streamed run event.
     */
    async onEvent(event) {
        try {
            console.log(event);
            // Retrieve events that are denoted with 'requires_action'
            // since these will have our tool_calls
            if (event.event === "thread.run.requires_action") {
                await this.handleRequiresAction(
                    event.data,
                    event.data.id,
                    event.data.thread_id,
                );
            }
        } catch (error) {
            console.error("Error handling event:", error);
        }
    }

    /**
     * Executes each requested tool call and submits the outputs in one batch.
     * Unrecognized tool names are skipped; previously they produced
     * `undefined` entries in `tool_outputs`, which would have been submitted
     * to the API as-is.
     * @param {object} data - run payload containing `required_action`.
     * @param {string} runId - id of the run awaiting tool outputs.
     * @param {string} threadId - id of the thread the run belongs to.
     */
    async handleRequiresAction(data, runId, threadId) {
        try {
            const toolOutputs = data.required_action.submit_tool_outputs.tool_calls
                .map((toolCall) => {
                    // Call the function
                    switch (toolCall.function.name) {
                        case "getCurrentTemperature":
                            // Hard-coded stub value; replace with a real lookup.
                            return {
                                tool_call_id: toolCall.id,
                                output: "57",
                            };
                        default:
                            // Unknown tool: nothing to run for this call.
                            return undefined;
                    }
                })
                .filter((output) => output !== undefined);
            // Submit all the tool outputs at the same time
            await this.submitToolOutputs(toolOutputs, runId, threadId);
        } catch (error) {
            console.error("Error processing required action:", error);
        }
    }

    /**
     * Streams the tool outputs back to the run and re-emits every resulting
     * event on this emitter so `onEvent` can process follow-up actions.
     * @param {Array<{tool_call_id: string, output: string}>} toolOutputs
     * @param {string} runId
     * @param {string} threadId
     */
    async submitToolOutputs(toolOutputs, runId, threadId) {
        try {
            // Use the submitToolOutputsStream helper
            const stream = this.client.beta.threads.runs.submitToolOutputsStream(
                threadId,
                runId,
                { tool_outputs: toolOutputs },
            );
            for await (const event of stream) {
                this.emit("event", event);
            }
        } catch (error) {
            console.error("Error submitting tool outputs:", error);
        }
    }
}
// Single shared handler wired to the OpenAI client; it subscribes to its own
// "event" emissions so events re-emitted by submitToolOutputs are routed back
// through onEvent (allowing chained requires_action handling).
const eventHandler = new EventHandler(openai);
eventHandler.on("event", eventHandler.onEvent.bind(eventHandler));
/**
 * Runs a conversation through the OpenAI Assistants API and returns the
 * assistant's latest reply.
 *
 * Creates a fresh thread per call, appends each message, streams a run with
 * the module-level assistant (forwarding events through `eventHandler` so
 * tool calls are serviced), then reads back the newest thread message.
 *
 * @param {Array<{role: string, content: string}>} additionalMessages
 *   Conversation messages in Assistants-API message shape, oldest first.
 * @returns {Promise<string|false>} reply text, or `false` on error or an
 *   empty/non-text reply.
 */
export const gptHandler = async (additionalMessages) => {
    const thread = await openai.beta.threads.create();
    // Add the conversation history to the new thread, preserving order
    // (sequential awaits are intentional — message order matters).
    for (const msgObj of additionalMessages) {
        await openai.beta.threads.messages.create(
            thread.id,
            msgObj
        );
    }
    log.DEBUG("AI Conversation:", thread);
    // Run the thread to get a response
    try {
        const stream = await openai.beta.threads.runs.stream(
            thread.id,
            { assistant_id: assistant.id },
            eventHandler,
        );
        // Forward every streamed event to the shared handler so
        // `thread.run.requires_action` tool calls get serviced.
        for await (const event of stream) {
            eventHandler.emit("event", event);
        }
        // Messages are listed newest-first; index 0 is the assistant's reply.
        // Guard the chain: an empty list or non-text content previously threw
        // here instead of falling through to the `false` return.
        const messages = await openai.beta.threads.messages.list(
            thread.id
        );
        const response = messages.data[0]?.content?.[0]?.text?.value;
        log.DEBUG("AI Response:", response);
        if (!response) {
            return false;
        }
        return response;
    } catch (error) {
        console.error('Error generating response:', error);
        return false;
    }
};