Updated function with logging

Logan Cusano
2023-02-26 14:57:02 -05:00
parent fcb478930d
commit 2b92b1dc1a

@@ -20,7 +20,9 @@ async function getGeneration(_prompt, callback, { _model = "text-davinci-003", _
const encodedPrompt = encode(_prompt);
const promptTokens = encodedPrompt.length;
log.DEBUG("Tokens in prompt: ", promptTokens);
_max_tokens = _max_tokens - promptTokens;
log.DEBUG("Updated max tokens: ", _max_tokens);
log.DEBUG("Getting chat with these properties: ", _prompt, _model, _temperature, _max_tokens)
try{
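
A minimal sketch of the token-budget adjustment this change logs, assuming encode comes from the gpt-3-encoder package (the import is not shown in the diff) and using an illustrative budget value:

// Assumption: encode is provided by the gpt-3-encoder npm package.
const { encode } = require("gpt-3-encoder");

// Count the tokens the prompt will consume, then shrink the completion
// budget so prompt + completion stays within the model's context window.
function remainingCompletionTokens(prompt, maxTokens) {
    const promptTokens = encode(prompt).length;
    return maxTokens - promptTokens;
}

// e.g. a 4-token prompt with a 4000-token budget leaves 3996 for the reply.
console.log(remainingCompletionTokens("Hello there, world!", 4000));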