From fcb478930da700efe1ebfdf920b037ccc149447b Mon Sep 17 00:00:00 2001
From: Logan Cusano
Date: Sun, 26 Feb 2023 14:39:57 -0500
Subject: [PATCH] Added new module to tokenize prompts

- Used to subtract prompt tokens from max tokens

---
 controllers/chatGptController.js | 6 ++++--
 package.json                     | 3 ++-
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/controllers/chatGptController.js b/controllers/chatGptController.js
index 0a2c3aa..59e4895 100644
--- a/controllers/chatGptController.js
+++ b/controllers/chatGptController.js
@@ -3,6 +3,7 @@
 const log = new DebugBuilder("server", "chatGptController");
 const { createTransaction } = require("./transactionController");
+const { encode } = require("gpt-3-encoder")
 
 const { Configuration, OpenAIApi } = require('openai');
 const configuration = new Configuration({
     organization: process.env.OPENAI_ORG,
@@ -11,14 +12,15 @@ const configuration = new Configuration({
 const openai = new OpenAIApi(configuration);
 
 
-
 async function getGeneration(_prompt, callback, { _model = "text-davinci-003", _temperature = 0, _max_tokens = 100}) {
     // If the temperature is set to null
     _temperature = _temperature ?? 0;
     // If the tokens are set to null
     _max_tokens = _max_tokens ?? 100;
 
-    // TODO - Get the tokens in the message and subtract that from the max tokens to be sent to the AI
+    const encodedPrompt = encode(_prompt);
+    const promptTokens = encodedPrompt.length;
+    _max_tokens = _max_tokens - promptTokens;
 
     log.DEBUG("Getting chat with these properties: ", _prompt, _model, _temperature, _max_tokens)
     try{
diff --git a/package.json b/package.json
index e12e787..501c9b7 100644
--- a/package.json
+++ b/package.json
@@ -24,7 +24,8 @@
     "ejs": "~2.6.1",
     "http-errors": "~1.6.3",
     "morgan": "~1.9.1",
-    "node-html-markdown": "~1.3.0"
+    "node-html-markdown": "~1.3.0",
+    "gpt-3-encoder": "~1.1.4"
   },
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1",
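
Note after the diff (not part of the commit): the new code subtracts the
prompt's token count straight from _max_tokens, so a prompt longer than the
caller's cap would leave a negative budget, which the OpenAI API rejects. A
minimal standalone sketch of the same token-budget math follows; the
Math.max clamp is an assumed guard, not something the patch itself does. It
relies only on gpt-3-encoder's encode(), which returns an array of token ids:

    // Token-budget sketch mirroring the patch, runnable on its own.
    const { encode } = require("gpt-3-encoder");

    function completionBudget(prompt, maxTokens = 100) {
        // encode() tokenizes like GPT-3, so .length is the prompt's token
        // count (not its character length).
        const promptTokens = encode(prompt).length;
        // The patch subtracts directly; Math.max(1, ...) is an added guard
        // so a long prompt cannot push the budget below zero.
        return Math.max(1, maxTokens - promptTokens);
    }

    // Prints the completion tokens left after the prompt is accounted for.
    console.log(completionBudget("Summarize this text.", 100));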