mirror of
https://github.com/discourse/discourse-ai.git
synced 2025-03-06 09:20:14 +00:00
FIX: improve token counting (#234)
We were running out of tokens under certain conditions (really long chains). Add more buffer.
This commit is contained in:
parent
237e9478df
commit
ed7d1f06d1
@ -243,6 +243,7 @@ discourse_ai:
|
||||
- sql_helper
|
||||
- settings_explorer
|
||||
- researcher
|
||||
- creative
|
||||
ai_bot_add_to_header:
|
||||
default: true
|
||||
client: true
|
||||
|
@ -16,9 +16,9 @@ module DiscourseAi
|
||||
# note this is about 100 tokens over, OpenAI have a more optimal representation
|
||||
@function_size ||= tokenize(available_functions.to_json).length
|
||||
|
||||
# provide a buffer of 80 tokens - our function counting is not
|
||||
# 100% accurate so this is a trial and error number
|
||||
buffer = @function_size + reply_params[:max_tokens] + 80
|
||||
# provide a buffer of 120 tokens - our function counting is not
|
||||
# 100% accurate and getting numbers to align exactly is very hard
|
||||
buffer = @function_size + reply_params[:max_tokens] + 120
|
||||
|
||||
if bot_user.id == DiscourseAi::AiBot::EntryPoint::GPT4_ID
|
||||
8192 - buffer
|
||||
|
Loading…
x
Reference in New Issue
Block a user