# frozen_string_literal: true

module DiscourseAi
  module AiBot
    class Bot
      attr_reader :model

      BOT_NOT_FOUND = Class.new(StandardError)

      MAX_COMPLETIONS = 5
      MAX_TOOLS = 5

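      # Convenience constructor. A minimal usage sketch (the context hash shape and
      # persona are illustrative, not exhaustive):
      #
      #   bot = DiscourseAi::AiBot::Bot.as(bot_user)
      #   bot.reply(context) do |partial, cancel, placeholder|
      #     # stream the partial response to the client
      #   end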
      def self.as(bot_user, persona: DiscourseAi::AiBot::Personas::General.new, model: nil)
        new(bot_user, persona, model)
      end

      def initialize(bot_user, persona, model = nil)
        @bot_user = bot_user
        @persona = persona
        @model = model || self.class.guess_model(bot_user) || @persona.class.default_llm
      end

      attr_reader :bot_user
      attr_accessor :persona

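      # Asks the LLM for a short (~7 word) topic title based on the conversation
      # context; returns only the last line of the completion.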
      def get_updated_title(conversation_context, post)
        system_insts = <<~TEXT.strip
          You are titlebot. Given a topic, you will figure out a title.
          You will never respond with anything but a 7 word topic title.
        TEXT

        title_prompt =
          DiscourseAi::Completions::Prompt.new(
            system_insts,
            messages: conversation_context,
            topic_id: post.topic_id,
          )

        title_prompt.push(
          type: :user,
          content:
            "Based on our previous conversation, suggest a 7 word title without quoting any of it.",
        )

        DiscourseAi::Completions::Llm
          .proxy(model)
          .generate(title_prompt, user: post.user)
          .strip
          .split("\n")
          .last
      end

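      # Produces the bot's reply for the given context. Crafts the prompt from the
      # persona, streams the completion, and feeds any tool calls the persona finds
      # back through process_tool, looping until no tool is requested or
      # MAX_COMPLETIONS is reached. Yields (partial, cancel, placeholder) to the
      # block as content streams in, and returns the accumulated raw_context.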
      def reply(context, &update_blk)
        prompt = persona.craft_prompt(context)

        total_completions = 0
        ongoing_chain = true
        raw_context = []

        user = context[:user]

        llm_kwargs = { user: user }
        llm_kwargs[:temperature] = persona.temperature if persona.temperature
        llm_kwargs[:top_p] = persona.top_p if persona.top_p

        while total_completions <= MAX_COMPLETIONS && ongoing_chain
          current_model = model
          llm = DiscourseAi::Completions::Llm.proxy(current_model)
          tool_found = false

          result =
            llm.generate(prompt, **llm_kwargs) do |partial, cancel|
              tools = persona.find_tools(partial)

              if tools.present?
                tool_found = true
                tools.take(MAX_TOOLS).each do |tool|
                  ongoing_chain &&= tool.chain_next_response?
                  process_tool(tool, raw_context, llm, cancel, update_blk, prompt, context)
                end
              else
                update_blk.call(partial, cancel, nil)
              end
            end

          if !tool_found
            ongoing_chain = false
            raw_context << [result, bot_user.username]
          end
          total_completions += 1

          # do not allow tools when we are at the end of a chain (total_completions == MAX_COMPLETIONS)
          prompt.tools = [] if total_completions == MAX_COMPLETIONS
        end

        raw_context
      end

      private

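      # Invokes a single tool, then records the tool call and its JSON result both
      # in the prompt (so the next completion can see them) and in raw_context.
      # Standalone tools instead get a fresh prompt built from just the last
      # conversation message plus the tool exchange.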
      def process_tool(tool, raw_context, llm, cancel, update_blk, prompt, context)
        tool_call_id = tool.tool_call_id
        invocation_result_json = invoke_tool(tool, llm, cancel, &update_blk).to_json

        tool_call_message = {
          type: :tool_call,
          id: tool_call_id,
          content: { arguments: tool.parameters }.to_json,
          name: tool.name,
        }

        tool_message = {
          type: :tool,
          id: tool_call_id,
          content: invocation_result_json,
          name: tool.name,
        }

        if tool.standalone?
          standalone_context =
            context.dup.merge(
              conversation_context: [
                context[:conversation_context].last,
                tool_call_message,
                tool_message,
              ],
            )
          prompt = persona.craft_prompt(standalone_context)
        else
          prompt.push(**tool_call_message)
          prompt.push(**tool_message)
        end

        raw_context << [tool_call_message[:content], tool_call_id, "tool_call", tool.name]
        raw_context << [invocation_result_json, tool_call_id, "tool", tool.name]
      end

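      # Runs the tool, streaming a placeholder (summary + progress) to the UI while
      # it works, then emits the final collapsed details block. Returns the tool's
      # raw result.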
      def invoke_tool(tool, llm, cancel, &update_blk)
        update_blk.call("", cancel, build_placeholder(tool.summary, ""))

        result =
          tool.invoke(bot_user, llm) do |progress|
            placeholder = build_placeholder(tool.summary, progress)
            update_blk.call("", cancel, placeholder)
          end

        tool_details = build_placeholder(tool.summary, tool.details, custom_raw: tool.custom_raw)
        update_blk.call(tool_details, cancel, nil)

        result
      end

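      # Maps the hard-coded bot user ids to a "provider:model" string such as
      # "open_ai:gpt-4", preferring AWS Bedrock or vLLM when those endpoints are
      # correctly configured. Returns nil for unrecognized ids.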
      def self.guess_model(bot_user)
        # HACK(roman): We'll do this until we define how we represent different providers in the bot settings
        case bot_user.id
        when DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID
          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
            "aws_bedrock:claude-2"
          else
            "anthropic:claude-2"
          end
        when DiscourseAi::AiBot::EntryPoint::GPT4_ID
          "open_ai:gpt-4"
        when DiscourseAi::AiBot::EntryPoint::GPT4_TURBO_ID
          "open_ai:gpt-4-turbo"
        when DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID
          "open_ai:gpt-3.5-turbo-16k"
        when DiscourseAi::AiBot::EntryPoint::MIXTRAL_ID
          if DiscourseAi::Completions::Endpoints::Vllm.correctly_configured?(
               "mistralai/Mixtral-8x7B-Instruct-v0.1",
             )
            "vllm:mistralai/Mixtral-8x7B-Instruct-v0.1"
          else
            "hugging_face:mistralai/Mixtral-8x7B-Instruct-v0.1"
          end
        when DiscourseAi::AiBot::EntryPoint::GEMINI_ID
          "google:gemini-pro"
        when DiscourseAi::AiBot::EntryPoint::FAKE_ID
          "fake:fake"
        when DiscourseAi::AiBot::EntryPoint::CLAUDE_3_OPUS_ID
          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-3-opus")
            "aws_bedrock:claude-3-opus"
          else
            "anthropic:claude-3-opus"
          end
        when DiscourseAi::AiBot::EntryPoint::COHERE_COMMAND_R_PLUS
          "cohere:command-r-plus"
        when DiscourseAi::AiBot::EntryPoint::CLAUDE_3_SONNET_ID
          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(
               "claude-3-sonnet",
             )
            "aws_bedrock:claude-3-sonnet"
          else
            "anthropic:claude-3-sonnet"
          end
        when DiscourseAi::AiBot::EntryPoint::CLAUDE_3_HAIKU_ID
          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-3-haiku")
            "aws_bedrock:claude-3-haiku"
          else
            "anthropic:claude-3-haiku"
          end
        else
          nil
        end
      end

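      # Builds the interim HTML injected into the post while a tool runs, e.g.
      # (illustrative output):
      #
      #   <details>
      #     <summary>Searching</summary>
      #     <p>found 10 results</p>
      #   </details>
      #
      # custom_raw, when present, is appended below the block; otherwise a trailing
      # <span> is added so the streaming cursor placeholder keeps working.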
      def build_placeholder(summary, details, custom_raw: nil)
        placeholder = +(<<~HTML)
          <details>
            <summary>#{summary}</summary>
            <p>#{details}</p>
          </details>
        HTML

        if custom_raw
          placeholder << "\n"
          placeholder << custom_raw
        else
          # we need this for the cursor placeholder to work
          # doing this in CSS is very hard
          # if changing this, test with a custom tool such as search
          placeholder << "<span></span>\n\n"
        end

        placeholder
      end
    end
  end
end