2023-05-11 09:03:03 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
|
|
|
module DiscourseAi
|
|
|
|
module AiBot
|
|
|
|
class Bot
|
|
|
|
# Raised when a bot user id cannot be resolved to a configured bot.
BOT_NOT_FOUND = Class.new(StandardError)

# Hard cap on LLM round-trips per reply; stops runaway tool-call chains.
MAX_COMPLETIONS = 5
|
2023-05-11 09:03:03 -04:00
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# Factory for a Bot acting as +bot_user+.
# Defaults to the General persona when none is supplied.
def self.as(bot_user, persona: DiscourseAi::AiBot::Personas::General.new)
  self.new(bot_user, persona)
end
|
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# @param bot_user [User] the account this bot posts and replies as
# @param persona [Object] persona driving prompts and tool lookup; fixed for
#   the lifetime of the Bot instance
def initialize(bot_user, persona)
  @bot_user, @persona = bot_user, persona
end

attr_reader :bot_user
|
2023-05-23 09:08:17 -04:00
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# Asks the LLM for a short topic title based on the conversation so far.
#
# @param conversation_context [Array] prior messages fed to the prompt
# @param post_user [User] user attributed for the LLM call
# @return [String] the last line of the stripped LLM response
def get_updated_title(conversation_context, post_user)
  instructions = <<~TEXT.strip
    You are titlebot. Given a topic, you will figure out a title.
    You will never respond with anything but 7 word topic title.
  TEXT

  prompt = DiscourseAi::Completions::Prompt.new(instructions, messages: conversation_context)
  prompt.push(
    type: :user,
    content:
      "Based on our previous conversation, suggest a 7 word title without quoting any of it.",
  )

  llm = DiscourseAi::Completions::Llm.proxy(model)
  response = llm.generate(prompt, user: post_user)

  # Models sometimes emit preamble; keep only the final line as the title.
  response.strip.split("\n").last
end
|
|
|
|
|
|
|
|
# Runs the completion loop for a bot reply, streaming partials through
# +update_blk+ and invoking persona tools as the model requests them.
#
# The loop runs until either no tool was requested in a pass, or
# MAX_COMPLETIONS passes have happened. Each tool invocation is recorded both
# into the live prompt (so the model sees its own tool calls) and into
# +raw_context+, which is returned to the caller for persistence.
#
# @param context [Hash] request context; reads :user and :conversation_context
# @param update_blk [Proc] called as (partial, cancel, placeholder) to stream
#   output to the client
# @return [Array] raw_context — tuples describing generated text and tool
#   calls/results, in the order they occurred
def reply(context, &update_blk)
  prompt = persona.craft_prompt(context)

  total_completions = 0
  ongoing_chain = true
  low_cost = false
  raw_context = []

  while total_completions <= MAX_COMPLETIONS && ongoing_chain
    # low_cost may have been set by the previous pass's tool, so the model
    # choice is re-evaluated every iteration.
    current_model = model(prefer_low_cost: low_cost)
    llm = DiscourseAi::Completions::Llm.proxy(current_model)
    tool_found = false

    result =
      llm.generate(prompt, user: context[:user]) do |partial, cancel|
        if (tool = persona.find_tool(partial))
          tool_found = true
          # The tool decides whether the chain continues and whether the
          # next pass may use a cheaper model.
          ongoing_chain = tool.chain_next_response?
          low_cost = tool.low_cost?
          tool_call_id = tool.tool_call_id
          invocation_result_json = invoke_tool(tool, llm, cancel, &update_blk).to_json

          tool_call_message = {
            type: :tool_call,
            id: tool_call_id,
            content: { name: tool.name, arguments: tool.parameters }.to_json,
          }

          tool_message = { type: :tool, id: tool_call_id, content: invocation_result_json }

          if tool.standalone?
            # A standalone tool restarts the prompt from a minimal context:
            # just the last user message plus this tool exchange.
            standalone_context =
              context.dup.merge(
                conversation_context: [
                  context[:conversation_context].last,
                  tool_call_message,
                  tool_message,
                ],
              )
            prompt = persona.craft_prompt(standalone_context)
          else
            prompt.push(**tool_call_message)
            prompt.push(**tool_message)
          end

          raw_context << [tool_call_message[:content], tool_call_id, "tool_call"]
          raw_context << [invocation_result_json, tool_call_id, "tool"]
        else
          # Plain text partial — stream it straight to the client.
          update_blk.call(partial, cancel, nil)
        end
      end

    if !tool_found
      # No tool requested: this pass produced the final answer.
      ongoing_chain = false
      raw_context << [result, bot_user.username]
    end
    total_completions += 1

    # do not allow tools when we are at the end of a chain (total_completions == MAX_COMPLETIONS)
    prompt.tools = [] if total_completions == MAX_COMPLETIONS
  end

  raw_context
end
|
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
attr_reader :persona
|
2023-05-20 03:45:54 -04:00
|
|
|
|
2024-01-05 13:21:14 -05:00
|
|
|
private
|
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# Executes a single tool, streaming placeholder updates while it runs and a
# final details block when done.
#
# @param tool [Object] persona tool responding to #invoke/#summary/#details
# @param llm [Object] LLM proxy passed through to the tool
# @param cancel [Proc] cancellation hook forwarded to +update_blk+
# @param update_blk [Proc] streaming callback (partial, cancel, placeholder)
# @return [Object] the tool invocation's result
def invoke_tool(tool, llm, cancel, &update_blk)
  # Show an empty placeholder immediately so the client sees activity.
  update_blk.call("", cancel, build_placeholder(tool.summary, ""))

  invocation_result =
    tool.invoke(bot_user, llm) do |progress|
      update_blk.call("", cancel, build_placeholder(tool.summary, progress))
    end

  details = build_placeholder(tool.summary, tool.details, custom_raw: tool.custom_raw)
  update_blk.call(details, cancel, nil)

  invocation_result
end
|
2023-05-11 09:03:03 -04:00
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# Maps the bot user's id to an LLM model identifier string.
#
# @param prefer_low_cost [Boolean] when true, downgrades GPT-4 family models
#   to gpt-3.5-turbo-16k for cheaper continuation passes
# @return [String, nil] "provider:model" identifier, or nil for unknown bots
def model(prefer_low_cost: false)
  # HACK(roman): We'll do this until we define how we represent different providers in the bot settings
  chosen_model =
    case bot_user.id
    when DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID
      if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
        "aws_bedrock:claude-2"
      else
        "anthropic:claude-2"
      end
    when DiscourseAi::AiBot::EntryPoint::GPT4_ID
      "open_ai:gpt-4"
    when DiscourseAi::AiBot::EntryPoint::GPT4_TURBO_ID
      "open_ai:gpt-4-turbo"
    when DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID
      "open_ai:gpt-3.5-turbo-16k"
    when DiscourseAi::AiBot::EntryPoint::MIXTRAL_ID
      if DiscourseAi::Completions::Endpoints::Vllm.correctly_configured?(
           "mistralai/Mixtral-8x7B-Instruct-v0.1",
         )
        "vllm:mistralai/Mixtral-8x7B-Instruct-v0.1"
      else
        "hugging_face:mistralai/Mixtral-8x7B-Instruct-v0.1"
      end
    when DiscourseAi::AiBot::EntryPoint::GEMINI_ID
      "google:gemini-pro"
    when DiscourseAi::AiBot::EntryPoint::FAKE_ID
      "fake:fake"
    else
      # Unknown bot id — caller is expected to handle a nil model.
      nil
    end

  # Cheap-model override applies only to the GPT-4 family.
  gpt4_family = %w[open_ai:gpt-4 open_ai:gpt-4-turbo]
  return "open_ai:gpt-3.5-turbo-16k" if prefer_low_cost && gpt4_family.include?(chosen_model)

  chosen_model
end
|
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# True when +partial+ contains an <invoke> element, i.e. the model is asking
# to call a tool.
# NOTE(review): appears unused within this class — verify callers before
# removing; relies on Rails' #present?.
def tool_invocation?(partial)
  fragment = Nokogiri::HTML5.fragment(partial)
  fragment.at("invoke").present?
end
|
|
|
|
|
2024-01-04 08:44:07 -05:00
|
|
|
# Builds the HTML placeholder shown while a tool runs.
#
# @param summary [String] short label placed in the <summary> element
# @param details [String] progress/details text placed in the <p> element
# @param custom_raw [String, nil] optional raw markup appended after the
#   details block instead of the default cursor span
# @return [String] mutable HTML string
def build_placeholder(summary, details, custom_raw: nil)
  markup = +<<~HTML
    <details>
    <summary>#{summary}</summary>
    <p>#{details}</p>
    </details>
  HTML

  if custom_raw
    markup << "\n" << custom_raw
  else
    # we need this for cursor placeholder to work
    # doing this in CSS is very hard
    # if changing test with a custom tool such as search
    markup << "<span></span>\n\n"
  end

  markup
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|