Implement OpenAI Assistant API

- Updated linkCop
- Updated standard interaction handler
This commit is contained in:
Logan Cusano
2024-07-14 15:47:46 -04:00
parent 24296c2ae4
commit 2cd5eee940
3 changed files with 106 additions and 23 deletions

View File

import dotenv from 'dotenv';
dotenv.config();
import { OpenAI } from 'openai';
import { EventEmitter } from 'events';

// BUG FIX: the OpenAI constructor takes an options object, not a bare key string;
// passing the key positionally silently ignores it and falls back to OPENAI_API_KEY
// from the environment by accident rather than by design.
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Create the assistant once at module load (top-level await, valid in ESM).
// NOTE(review): this registers a brand-new assistant on every process start —
// presumably a persisted assistant id should be reused instead; confirm intent.
const assistant = await openai.beta.assistants.create({
  name: "Emmelia",
  instructions: process.env.DRB_SERVER_INITIAL_PROMPT,
  model: "gpt-4o",
});
/**
 * Routes streamed Assistant-run events and answers tool calls.
 *
 * When a run pauses with `thread.run.requires_action`, this handler resolves
 * each requested tool call and submits the outputs back via the streaming
 * helper, re-emitting every resulting event as "event" so the loop continues
 * until the run completes.
 */
class EventHandler extends EventEmitter {
  /**
   * @param {object} client - OpenAI SDK client used to submit tool outputs.
   */
  constructor(client) {
    super();
    this.client = client;
  }

  /**
   * Handle a single streamed run event; only `requires_action` events need work.
   * @param {object} event - streamed event with `event` (type) and `data` fields.
   */
  async onEvent(event) {
    try {
      console.log(event);
      // Events denoted 'requires_action' carry the tool_calls we must answer.
      if (event.event === "thread.run.requires_action") {
        await this.handleRequiresAction(
          event.data,
          event.data.id,
          event.data.thread_id,
        );
      }
    } catch (error) {
      console.error("Error handling event:", error);
    }
  }

  /**
   * Resolve every requested tool call and submit the outputs.
   * @param {object} data - run payload containing `required_action`.
   * @param {string} runId - id of the paused run.
   * @param {string} threadId - id of the thread the run belongs to.
   */
  async handleRequiresAction(data, runId, threadId) {
    try {
      const toolOutputs = data.required_action.submit_tool_outputs.tool_calls
        .map((toolCall) => {
          switch (toolCall.function.name) {
            case "getCurrentTemperature":
              return {
                tool_call_id: toolCall.id,
                output: "57",
              };
          }
          // Unrecognized tool: no output to report for this call.
          return undefined;
        })
        // BUG FIX: the original map yielded `undefined` for unknown tool names,
        // which would then be submitted as an invalid tool output entry.
        .filter((output) => output !== undefined);
      // Submit all the tool outputs at the same time.
      await this.submitToolOutputs(toolOutputs, runId, threadId);
    } catch (error) {
      console.error("Error processing required action:", error);
    }
  }

  /**
   * Stream the tool outputs back to the run, re-emitting each event.
   * @param {Array<object>} toolOutputs - `{ tool_call_id, output }` entries.
   * @param {string} runId - id of the paused run.
   * @param {string} threadId - id of the owning thread.
   */
  async submitToolOutputs(toolOutputs, runId, threadId) {
    try {
      // Use the submitToolOutputsStream helper so follow-up events keep flowing.
      const stream = this.client.beta.threads.runs.submitToolOutputsStream(
        threadId,
        runId,
        { tool_outputs: toolOutputs },
      );
      for await (const event of stream) {
        this.emit("event", event);
      }
    } catch (error) {
      console.error("Error submitting tool outputs:", error);
    }
  }
}
// Shared singleton: every "event" emitted (by the run stream loop below, or by
// submitToolOutputs re-emitting) is fed back into onEvent, so tool-call rounds
// (requires_action -> submit outputs -> more events) repeat until the run finishes.
const eventHandler = new EventHandler(openai);
eventHandler.on("event", eventHandler.onEvent.bind(eventHandler));
/**
 * Run one conversation turn through the OpenAI Assistants API.
 *
 * Creates a fresh thread, posts each entry of `additionalMessages` onto it,
 * streams an assistant run (routing events through the shared eventHandler so
 * tool calls get answered), then returns the newest message's text.
 *
 * @param {Array<object>} additionalMessages - message objects to seed the
 *   thread with (shape accepted by `threads.messages.create`).
 * @returns {Promise<string|false>} the assistant's reply text, or false when
 *   no reply was produced or an error occurred.
 */
export const gptHandler = async (additionalMessages) => {
  const thread = await openai.beta.threads.create();

  // Post each incoming message onto the new thread, in order.
  for (const msgObj of additionalMessages) {
    await openai.beta.threads.messages.create(
      thread.id,
      msgObj
    );
  }
  log.DEBUG("AI Conversation:", thread);

  // Run the thread to get a response.
  try {
    const stream = await openai.beta.threads.runs.stream(
      thread.id,
      { assistant_id: assistant.id },
      eventHandler,
    );
    // Forward every streamed event to the handler; it answers tool calls
    // and keeps the run progressing until completion.
    for await (const event of stream) {
      eventHandler.emit("event", event);
    }

    // Newest message is first in the listing; take its first text part.
    // Optional chaining guards against an empty thread or non-text content.
    const messages = await openai.beta.threads.messages.list(
      thread.id
    );
    const response = messages.data[0]?.content[0]?.text?.value;
    log.DEBUG("AI Response:", response);

    if (!response) {
      return false;
    }
    return response;
  } catch (error) {
    console.error('Error generating response:', error);
    return false;
  }
};