# frozen_string_literal: true

module DiscourseAi
  module AiBot
    class OpenAiBot < Bot
      def self.can_reply_as?(bot_user)
        open_ai_bot_ids = [
          DiscourseAi::AiBot::EntryPoint::GPT4_ID,
          DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID,
        ]

        open_ai_bot_ids.include?(bot_user.id)
      end

      def prompt_limit
        # note GPT counts both reply and request tokens in limits...
        # also allow for an extra 500 or so spare tokens
        #
        # 2500 are the max reply tokens
        # then we have 450 or so for the full function suite
        # 100 additional for growth around function calls
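        #
        # 2500 + 450 + 100 = 3050, which is the amount subtracted from the
        # model context window below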
        if bot_user.id == DiscourseAi::AiBot::EntryPoint::GPT4_ID
          8192 - 3050
        else
          16_384 - 3050
        end
      end

      def reply_params
        # technically we could allow GPT-3.5 16k more tokens
        # but let's just keep it here for now
        { temperature: 0.4, top_p: 0.9, max_tokens: 2500 }
      end

      def submit_prompt(
        prompt,
        prefer_low_cost: false,
        temperature: nil,
        top_p: nil,
        max_tokens: nil,
        &blk
      )
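        # merge with a block so explicitly passed nil values fall back to the
        # defaults from reply_params instead of overwriting them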
        params =
          reply_params.merge(
            temperature: temperature,
            top_p: top_p,
            max_tokens: max_tokens,
          ) { |key, old_value, new_value| new_value.nil? ? old_value : new_value }

        model = model_for(low_cost: prefer_low_cost)

        params[:functions] = available_functions if available_functions.present?

        DiscourseAi::Inference::OpenAiCompletions.perform!(prompt, model, **params, &blk)
      end

      def tokenize(text)
        DiscourseAi::Tokenizer::OpenAiTokenizer.tokenize(text)
      end

      def available_functions
        # note if defined? can be a problem in test
        # this can never be nil so it is safe
        return @available_functions if @available_functions

        functions = []

        functions =
          available_commands.map do |command|
            function =
              DiscourseAi::Inference::OpenAiCompletions::Function.new(
                name: command.name,
                description: command.desc,
              )
            command.parameters.each do |parameter|
              function.add_parameter(
                name: parameter.name,
                type: parameter.type,
                description: parameter.description,
                required: parameter.required,
              )
            end
            function
          end

        @available_functions = functions
      end

      def available_commands
        return @cmds if @cmds

        all_commands =
          [
            Commands::CategoriesCommand,
            Commands::TimeCommand,
            Commands::SearchCommand,
            Commands::SummarizeCommand,
            Commands::ReadCommand,
          ].tap do |cmds|
            cmds << Commands::TagsCommand if SiteSetting.tagging_enabled
            cmds << Commands::ImageCommand if SiteSetting.ai_stability_api_key.present?
            if SiteSetting.ai_google_custom_search_api_key.present? &&
                 SiteSetting.ai_google_custom_search_cx.present?
              cmds << Commands::GoogleCommand
            end
          end

        allowed_commands = SiteSetting.ai_bot_enabled_chat_commands.split("|")
        @cmds = all_commands.filter { |klass| allowed_commands.include?(klass.name) }
      end

      def model_for(low_cost: false)
        return "gpt-4" if bot_user.id == DiscourseAi::AiBot::EntryPoint::GPT4_ID && !low_cost
        "gpt-3.5-turbo-16k"
      end
      def clean_username(username)
        if username.match?(/\A[a-zA-Z0-9_-]{1,64}\z/)
          username
        else
          # not the best in the world, but this is what we have to work with
          # if sites enable unicode usernames this can get messy
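          # e.g. "josé.dev" would become "jos__dev" (illustrative)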
          username.gsub(/[^a-zA-Z0-9_-]/, "_")[0..63]
        end
      end

      private

      def populate_functions(partial, functions)
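        # the streamed partial is expected to look roughly like (illustrative):
        #   { choices: [{ delta: { function_call: { name: "search", arguments: "{\"query\":" } } }] }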
        fn = partial.dig(:choices, 0, :delta, :function_call)
        if fn
          functions.add_function(fn[:name]) if fn[:name].present?
          functions.add_argument_fragment(fn[:arguments]) if fn[:arguments].present?
        end
      end

      def build_message(poster_username, content, function: false, system: false)
        is_bot = poster_username == bot_user.username

        if function
          role = "function"
        elsif system
          role = "system"
        else
          role = is_bot ? "assistant" : "user"
        end

        result = { role: role, content: content }

        if function
          result[:name] = poster_username
        elsif !system && poster_username != bot_user.username && poster_username.present?
          # OpenAI restricts name to 64 chars and only A-Za-z0-9_- (work around)
          result[:name] = clean_username(poster_username)
        end

        result
      end

      def get_delta(partial, _context)
        partial.dig(:choices, 0, :delta, :content).to_s
      end

      def get_updated_title(prompt)
        DiscourseAi::Inference::OpenAiCompletions.perform!(
          prompt,
          model_for,
          temperature: 0.7,
          top_p: 0.9,
          max_tokens: 40,
        ).dig(:choices, 0, :message, :content)
      end
    end
  end
end