DEV: Rewire AI bot internals to use LlmModel (#638)

* DRAFT: Create AI Bot users dynamically and support custom LlmModels

* Get user associated to llm_model

* Track enabled bots with attribute

* Don't store bot username. Minor touches to migrate default values in settings

* Handle scenario where vLLM uses a SRV record

* Made 3.5-turbo-16k the default version so we can remove hack
This commit is contained in:
Roman Rizzi 2024-06-18 14:32:14 -03:00 committed by GitHub
parent cc0b222faa
commit 8d5f901a67
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
56 changed files with 823 additions and 383 deletions

View File

@ -43,6 +43,7 @@ module DiscourseAi
llm_model = LlmModel.find(params[:id])
if llm_model.update(ai_llm_params)
llm_model.toggle_companion_user
render json: llm_model
else
render_json_error llm_model
@ -106,6 +107,7 @@ module DiscourseAi
:max_prompt_tokens,
:url,
:api_key,
:enabled_chat_bot,
)
end
end

View File

@ -31,12 +31,10 @@ module DiscourseAi
end
def show_bot_username
bot_user_id = DiscourseAi::AiBot::EntryPoint.map_bot_model_to_user_id(params[:username])
raise Discourse::InvalidParameters.new(:username) if !bot_user_id
bot_user = DiscourseAi::AiBot::EntryPoint.find_user_from_model(params[:username])
raise Discourse::InvalidParameters.new(:username) if !bot_user
bot_username_lower = User.find(bot_user_id).username_lower
render json: { bot_username: bot_username_lower }, status: 200
render json: { bot_username: bot_user.username_lower }, status: 200
end
end
end

View File

@ -252,40 +252,32 @@ end
#
# Table name: ai_personas
#
# id :bigint not null, primary key
# name :string(100) not null
# description :string(2000) not null
# tools :json not null
# system_prompt :string(10000000) not null
# allowed_group_ids :integer default([]), not null, is an Array
# created_by_id :integer
# enabled :boolean default(TRUE), not null
# created_at :datetime not null
# updated_at :datetime not null
# system :boolean default(FALSE), not null
# priority :boolean default(FALSE), not null
# temperature :float
# top_p :float
# user_id :integer
# mentionable :boolean default(FALSE), not null
# default_llm :text
# max_context_posts :integer
# max_post_context_tokens :integer
# max_context_tokens :integer
# vision_enabled :boolean default(FALSE), not null
# vision_max_pixels :integer default(1048576), not null
# rag_chunk_tokens :integer default(374), not null
# rag_chunk_overlap_tokens :integer default(10), not null
# rag_conversation_chunks :integer default(10), not null
# role :enum default("bot"), not null
# role_category_ids :integer default([]), not null, is an Array
# role_tags :string default([]), not null, is an Array
# role_group_ids :integer default([]), not null, is an Array
# role_whispers :boolean default(FALSE), not null
# role_max_responses_per_hour :integer default(50), not null
# question_consolidator_llm :text
# allow_chat :boolean default(FALSE), not null
# tool_details :boolean default(TRUE), not null
# id :bigint not null, primary key
# name :string(100) not null
# description :string(2000) not null
# system_prompt :string(10000000) not null
# allowed_group_ids :integer default([]), not null, is an Array
# created_by_id :integer
# enabled :boolean default(TRUE), not null
# created_at :datetime not null
# updated_at :datetime not null
# system :boolean default(FALSE), not null
# priority :boolean default(FALSE), not null
# temperature :float
# top_p :float
# user_id :integer
# mentionable :boolean default(FALSE), not null
# default_llm :text
# max_context_posts :integer
# vision_enabled :boolean default(FALSE), not null
# vision_max_pixels :integer default(1048576), not null
# rag_chunk_tokens :integer default(374), not null
# rag_chunk_overlap_tokens :integer default(10), not null
# rag_conversation_chunks :integer default(10), not null
# question_consolidator_llm :text
# allow_chat :boolean default(FALSE), not null
# tool_details :boolean default(TRUE), not null
# tools :json not null
#
# Indexes
#

View File

@ -6,7 +6,7 @@ end
# == Schema Information
#
# Table name: message_custom_prompts
# Table name: chat_message_custom_prompts
#
# id :bigint not null, primary key
# message_id :bigint not null
@ -16,5 +16,5 @@ end
#
# Indexes
#
# index_message_custom_prompts_on_message_id (message_id) UNIQUE
# index_chat_message_custom_prompts_on_message_id (message_id) UNIQUE
#

View File

@ -1,6 +1,75 @@
# frozen_string_literal: true
class LlmModel < ActiveRecord::Base
FIRST_BOT_USER_ID = -1200
RESERVED_VLLM_SRV_URL = "https://vllm.shadowed-by-srv.invalid"
belongs_to :user
validates :url, exclusion: { in: [RESERVED_VLLM_SRV_URL] }
# Keeps the reserved SRV-backed vLLM model in sync with the hidden
# `ai_vllm_endpoint_srv` site setting: creates the record when the setting
# is present and no such model exists yet, and removes the record once the
# setting has been cleared.
def self.enable_or_disable_srv_llm!
  srv_model = find_by(url: RESERVED_VLLM_SRV_URL)

  if SiteSetting.ai_vllm_endpoint_srv.present? && srv_model.blank?
    record =
      new(
        display_name: "vLLM SRV LLM",
        name: "mistralai/Mixtral",
        provider: "vllm",
        tokenizer: "DiscourseAi::Tokenizer::MixtralTokenizer",
        url: RESERVED_VLLM_SRV_URL,
        # NOTE(review): original passed `vllm_key:`, which is not a column on
        # llm_models (schema lists `api_key`); `api_key` appears intended —
        # confirm against the model's attributes.
        api_key: "",
        user_id: nil,
        enabled_chat_bot: false,
      )

    record.save(validate: false) # Ignore reserved URL validation
  elsif SiteSetting.ai_vllm_endpoint_srv.blank? && srv_model.present?
    # Only destroy once the setting is gone. Without the `blank?` guard the
    # record would be destroyed on every call while the setting is still set,
    # then re-created on the next one.
    srv_model.destroy!
  end
end
# Creates, reactivates, deactivates, or destroys this model's companion bot
# user, depending on `SiteSetting.ai_bot_enabled` and the model's own
# `enabled_chat_bot` flag.
def toggle_companion_user
  # The "fake" model is only for dev/test; never materialize its user in production.
  return if name == "fake" && Rails.env.production?

  enable_check = SiteSetting.ai_bot_enabled && enabled_chat_bot

  if enable_check
    if !user
      # Bot users live in the negative id range: take the next id below the
      # current minimum, capped at FIRST_BOT_USER_ID (-1200) so dynamically
      # created bots stay clear of the historical hard-coded bot ids.
      next_id = DB.query_single(<<~SQL).first
        SELECT min(id) - 1 FROM users
      SQL

      new_user =
        User.new(
          id: [FIRST_BOT_USER_ID, next_id].min,
          email: "no_email_#{name.underscore}", # placeholder; validations skipped below
          name: name.titleize,
          username: UserNameSuggester.suggest(name),
          active: true,
          approved: true,
          admin: true,
          moderator: true,
          trust_level: TrustLevel[4],
        )
      # validate: false — the placeholder email (and possibly username) would
      # not pass normal user validations.
      new_user.save!(validate: false)
      self.update!(user: new_user)
    else
      # Companion user already exists — just make sure it is active again.
      user.update!(active: true)
    end
  elsif user
    # will include deleted
    has_posts = DB.query_single("SELECT 1 FROM posts WHERE user_id = #{user.id} LIMIT 1").present?

    if has_posts
      # The account authored content: keep it but deactivate.
      user.update!(active: false) if user.active
    else
      # No posts at all: safe to remove the user entirely.
      user.destroy!
      self.update!(user: nil)
    end
  end
end
# Resolves the stored tokenizer class name (e.g.
# "DiscourseAi::Tokenizer::MixtralTokenizer") into the actual class object.
def tokenizer_class
  tokenizer.constantize
end
@ -20,4 +89,6 @@ end
# updated_at :datetime not null
# url :string
# api_key :string
# user_id :integer
# enabled_chat_bot :boolean default(FALSE), not null
#

View File

@ -133,12 +133,10 @@ class SharedAiConversation < ActiveRecord::Base
end
def self.build_conversation_data(topic, max_posts: DEFAULT_MAX_POSTS, include_usernames: false)
llm_name = nil
topic.topic_allowed_users.each do |tu|
if DiscourseAi::AiBot::EntryPoint::BOT_USER_IDS.include?(tu.user_id)
llm_name = DiscourseAi::AiBot::EntryPoint.find_bot_by_id(tu.user_id)&.llm
end
end
allowed_user_ids = topic.topic_allowed_users.pluck(:user_id)
ai_bot_participant = DiscourseAi::AiBot::EntryPoint.find_participant_in(allowed_user_ids)
llm_name = ai_bot_participant&.llm
llm_name = ActiveSupport::Inflector.humanize(llm_name) if llm_name
llm_name ||= I18n.t("discourse_ai.unknown_model")
@ -170,9 +168,7 @@ class SharedAiConversation < ActiveRecord::Base
cooked: post.cooked,
}
mapped[:persona] = persona if ::DiscourseAi::AiBot::EntryPoint::BOT_USER_IDS.include?(
post.user_id,
)
mapped[:persona] = persona if ai_bot_participant&.id == post.user_id
mapped[:username] = post.user&.username if include_usernames
mapped
end,

View File

@ -3,5 +3,18 @@
class LlmModelSerializer < ApplicationSerializer
root "llm"
attributes :id, :display_name, :name, :provider, :max_prompt_tokens, :tokenizer, :api_key, :url
attributes :id,
:display_name,
:name,
:provider,
:max_prompt_tokens,
:tokenizer,
:api_key,
:url,
:enabled_chat_bot,
:url_editable
def url_editable
object.url != LlmModel::RESERVED_VLLM_SRV_URL
end
end

View File

@ -11,7 +11,8 @@ export default class AiLlm extends RestModel {
"tokenizer",
"max_prompt_tokens",
"url",
"api_key"
"api_key",
"enabled_chat_bot"
);
}

View File

@ -1,19 +1,25 @@
import Component from "@glimmer/component";
import { action } from "@ember/object";
import { service } from "@ember/service";
import { gt } from "truth-helpers";
import DButton from "discourse/components/d-button";
import i18n from "discourse-common/helpers/i18n";
import { composeAiBotMessage } from "../lib/ai-bot-helper";
export default class AiBotHeaderIcon extends Component {
@service currentUser;
@service siteSettings;
@service composer;
get bots() {
return this.siteSettings.ai_bot_add_to_header
? this.siteSettings.ai_bot_enabled_chat_bots.split("|").filter(Boolean)
: [];
const availableBots = this.currentUser.ai_enabled_chat_bots
.filter((bot) => !bot.is_persosna)
.filter(Boolean);
return availableBots ? availableBots.map((bot) => bot.model_name) : [];
}
get showHeaderButton() {
return this.bots.length > 0 && this.siteSettings.ai_bot_add_to_header;
}
@action
@ -22,7 +28,7 @@ export default class AiBotHeaderIcon extends Component {
}
<template>
{{#if (gt this.bots.length 0)}}
{{#if this.showHeaderButton}}
<li>
<DButton
@action={{this.compose}}

View File

@ -1,11 +1,13 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { Input } from "@ember/component";
import { on } from "@ember/modifier";
import { action } from "@ember/object";
import { later } from "@ember/runloop";
import { inject as service } from "@ember/service";
import BackButton from "discourse/components/back-button";
import DButton from "discourse/components/d-button";
import DToggleSwitch from "discourse/components/d-toggle-switch";
import { popupAjaxError } from "discourse/lib/ajax-error";
import icon from "discourse-common/helpers/d-icon";
import i18n from "discourse-common/helpers/i18n";
@ -110,11 +112,31 @@ export default class AiLlmEditor extends Component {
});
}
@action
async toggleEnabledChatBot() {
this.args.model.set("enabled_chat_bot", !this.args.model.enabled_chat_bot);
if (!this.args.model.isNew) {
try {
await this.args.model.update({
enabled_chat_bot: this.args.model.enabled_chat_bot,
});
} catch (e) {
popupAjaxError(e);
}
}
}
<template>
<BackButton
@route="adminPlugins.show.discourse-ai-llms"
@label="discourse_ai.llms.back"
/>
{{#unless @model.url_editable}}
<div class="alert alert-info">
{{icon "exclamation-circle"}}
{{I18n.t "discourse_ai.llms.srv_warning"}}
</div>
{{/unless}}
<form class="form-horizontal ai-llm-editor">
<div class="control-group">
<label>{{i18n "discourse_ai.llms.display_name"}}</label>
@ -143,14 +165,16 @@ export default class AiLlmEditor extends Component {
@content={{this.selectedProviders}}
/>
</div>
<div class="control-group">
<label>{{I18n.t "discourse_ai.llms.url"}}</label>
<Input
class="ai-llm-editor-input ai-llm-editor__url"
@type="text"
@value={{@model.url}}
/>
</div>
{{#if @model.url_editable}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.llms.url"}}</label>
<Input
class="ai-llm-editor-input ai-llm-editor__url"
@type="text"
@value={{@model.url}}
/>
</div>
{{/if}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.llms.api_key"}}</label>
<Input
@ -181,7 +205,14 @@ export default class AiLlmEditor extends Component {
@content={{I18n.t "discourse_ai.llms.hints.max_prompt_tokens"}}
/>
</div>
<div class="control-group">
<DToggleSwitch
class="ai-llm-editor__enabled-chat-bot"
@state={{@model.enabled_chat_bot}}
@label="discourse_ai.llms.enabled_chat_bot"
{{on "click" this.toggleEnabledChatBot}}
/>
</div>
<div class="control-group ai-llm-editor__action_panel">
<DButton
class="ai-llm-editor__test"

View File

@ -4,7 +4,6 @@ import { hash } from "@ember/helper";
import { next } from "@ember/runloop";
import { inject as service } from "@ember/service";
import KeyValueStore from "discourse/lib/key-value-store";
import I18n from "I18n";
import DropdownSelectBox from "select-kit/components/dropdown-select-box";
function isBotMessage(composer, currentUser) {
@ -110,15 +109,16 @@ export default class BotSelector extends Component {
}
get llmOptions() {
return this.siteSettings.ai_bot_enabled_chat_bots
.split("|")
.filter(Boolean)
.map((bot) => {
return {
id: bot,
name: I18n.t(`discourse_ai.ai_bot.bot_names.${bot}`),
};
});
const availableBots = this.currentUser.ai_enabled_chat_bots
.filter((bot) => !bot.is_persosna)
.filter(Boolean);
return availableBots.map((bot) => {
return {
id: bot.model_name,
name: bot.display_name,
};
});
}
<template>

View File

@ -210,6 +210,7 @@ en:
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
api_key: "API Key of the service hosting the model"
enabled_chat_bot: "Allow Companion user to act as an AI Bot"
save: "Save"
edit: "Edit"
saved: "LLM Model Saved"
@ -217,6 +218,8 @@ en:
confirm_delete: Are you sure you want to delete this model?
delete: Delete
srv_warning: This LLM points to an SRV record, and its URL is not editable. You have to update the hidden "ai_vllm_endpoint_srv" setting instead.
tests:
title: "Run Test"
running: "Running test..."

View File

@ -342,7 +342,6 @@ discourse_ai:
type: group_list
list_type: compact
default: "3|14" # 3: @staff, 14: @trust_level_4
# Adding a new bot? Make sure to create a user for it on the seed file and update translations.
ai_bot_public_sharing_allowed_groups:
client: false
type: group_list
@ -350,22 +349,11 @@ discourse_ai:
default: "1|2" # 1: admins, 2: moderators
allow_any: false
refresh: true
ai_bot_enabled_chat_bots:
ai_bot_enabled_chat_bots: # TODO(roman): Remove setting. Deprecated
type: list
default: "gpt-3.5-turbo"
client: true
choices:
- gpt-3.5-turbo
- gpt-4
- gpt-4-turbo
- gpt-4o
- claude-2
- gemini-1.5-pro
- mixtral-8x7B-Instruct-V0.1
- claude-3-opus
- claude-3-sonnet
- claude-3-haiku
- cohere-command-r-plus
hidden: true
choices: "DiscourseAi::Configuration::LlmEnumerator.available_ai_bots"
ai_bot_add_to_header:
default: true
client: true

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
# Adds the columns that tie an LlmModel to its companion bot user and track
# whether the model is enabled as an AI chat bot.
class AddCompanionUserToLlmModel < ActiveRecord::Migration[7.0]
  def change
    change_table :llm_models do |t|
      # id of the companion (bot) user backing this model, when one exists
      t.integer :user_id
      # whether this model is exposed as an AI chat bot
      t.boolean :enabled_chat_bot, default: false, null: false
    end
  end
end

View File

@ -0,0 +1,108 @@
# frozen_string_literal: true
# Seeds llm_models rows mirroring the legacy OpenAI site settings (API key,
# per-model URL overrides, and the deprecated `ai_bot_enabled_chat_bots`
# list). Uses raw SQL so the migration does not depend on application models.
class SeedOpenAiModels < ActiveRecord::Migration[7.0]
  def up
    models = []

    open_ai_api_key = fetch_setting("ai_openai_api_key")

    enabled_models = fetch_setting("ai_bot_enabled_chat_bots")&.split("|").to_a
    # gpt-3.5-turbo-16k was the implicit default bot when the setting was empty.
    enabled_models = ["gpt-3.5-turbo-16k"] if enabled_models.empty?

    if open_ai_api_key.present?
      models << mirror_open_ai(
        "GPT-3.5-Turbo",
        "gpt-3.5-turbo",
        8192,
        "ai_openai_gpt35_url",
        open_ai_api_key,
        -111,
        enabled_models,
      )
      models << mirror_open_ai(
        "GPT-3.5-Turbo-16K",
        "gpt-3.5-turbo-16k",
        16_384,
        "ai_openai_gpt35_16k_url",
        open_ai_api_key,
        -111,
        enabled_models,
      )
      models << mirror_open_ai(
        "GPT-4",
        "gpt-4",
        8192,
        "ai_openai_gpt4_url",
        open_ai_api_key,
        -110,
        enabled_models,
      )
      models << mirror_open_ai(
        "GPT-4-32K",
        "gpt-4-32k",
        32_768,
        "ai_openai_gpt4_32k_url",
        open_ai_api_key,
        -110,
        enabled_models,
      )
      models << mirror_open_ai(
        "GPT-4-Turbo",
        "gpt-4-turbo",
        131_072,
        "ai_openai_gpt4_turbo_url",
        open_ai_api_key,
        -113,
        enabled_models,
      )
      models << mirror_open_ai(
        "GPT-4o",
        "gpt-4o",
        131_072,
        "ai_openai_gpt4o_url",
        open_ai_api_key,
        -121,
        enabled_models,
      )
    end

    if models.present?
      rows = models.compact.join(", ")

      DB.exec(<<~SQL) if rows.present?
        INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
        VALUES #{rows};
      SQL
    end
  end

  # True when the legacy hard-coded bot user (negative id) already exists, in
  # which case the seeded model row is linked to it.
  def has_companion_user?(user_id)
    DB.query_single("SELECT id FROM users WHERE id = :user_id", user_id: user_id).first.present?
  end

  # Reads a raw site_settings value straight from the DB (SiteSetting is not
  # safe to use inside migrations).
  def fetch_setting(name)
    DB.query_single(
      "SELECT value FROM site_settings WHERE name = :setting_name",
      setting_name: name,
    ).first
  end

  # Builds one SQL VALUES tuple for an OpenAI model, honoring the per-model
  # URL override setting and linking the legacy bot user when present.
  def mirror_open_ai(
    display_name,
    name,
    max_prompt_tokens,
    setting_name,
    key,
    bot_id,
    enabled_models
  )
    url = fetch_setting(setting_name) || "https://api.openai.com/v1/chat/completions"
    user_id = has_companion_user?(bot_id) ? bot_id : "NULL"
    enabled = enabled_models.include?(name)
    "('#{sql_str(display_name)}', '#{sql_str(name)}', 'open_ai', 'DiscourseAi::Tokenizer::OpenAiTokenizer', #{max_prompt_tokens}, '#{sql_str(url)}', '#{sql_str(key)}', #{user_id}, #{enabled}, NOW(), NOW())"
  end

  # Doubles embedded single quotes so interpolated values (URLs, API keys)
  # cannot break out of their SQL string literals.
  def sql_str(value)
    value.to_s.gsub("'", "''")
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

View File

@ -0,0 +1,109 @@
# frozen_string_literal: true
# Seeds llm_models with Claude rows mirroring the legacy Anthropic/Bedrock
# site settings. One row is created per model and configured provider
# (AWS Bedrock and/or the direct Anthropic API).
class SeedClaudeModels < ActiveRecord::Migration[7.0]
  def up
    claude_models = %w[claude-instant-1 claude-2 claude-3-haiku claude-3-sonnet claude-3-opus]
    models = []

    bedrock_secret_access_key = fetch_setting("ai_bedrock_secret_access_key")

    enabled_models = fetch_setting("ai_bot_enabled_chat_bots")&.split("|").to_a

    if bedrock_secret_access_key.present?
      bedrock_region = fetch_setting("ai_bedrock_region") || "us-east-1"

      claude_models.each do |cm|
        url =
          "https://bedrock-runtime.#{bedrock_region}.amazonaws.com/model/#{mapped_bedrock_model(cm)}/invoke"

        bot_id = claude_bot_id(cm)
        user_id = has_companion_user?(bot_id) ? bot_id : "NULL"
        enabled = enabled_models.include?(cm)

        models << "('#{display_name(cm)}', '#{cm}', 'aws_bedrock', 'DiscourseAi::Tokenizer::AnthropicTokenizer', 200000, '#{sql_str(url)}', '#{sql_str(bedrock_secret_access_key)}', #{user_id}, #{enabled}, NOW(), NOW())"
      end
    end

    anthropic_ai_api_key = fetch_setting("ai_anthropic_api_key")

    if anthropic_ai_api_key.present?
      claude_models.each do |cm|
        url = "https://api.anthropic.com/v1/messages"

        bot_id = claude_bot_id(cm)
        user_id = has_companion_user?(bot_id) ? bot_id : "NULL"
        enabled = enabled_models.include?(cm)

        models << "('#{display_name(cm)}', '#{cm}', 'anthropic', 'DiscourseAi::Tokenizer::AnthropicTokenizer', 200000, '#{sql_str(url)}', '#{sql_str(anthropic_ai_api_key)}', #{user_id}, #{enabled}, NOW(), NOW())"
      end
    end

    if models.present?
      rows = models.compact.join(", ")

      # `rows` is interpolated directly; the previous `rows:` bind param was
      # dead code (the SQL contains no :rows placeholder).
      DB.exec(<<~SQL) if rows.present?
        INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
        VALUES #{rows};
      SQL
    end
  end

  # True when the legacy hard-coded bot user (negative id) already exists.
  # Accepts nil (claude-instant-1 never had a bot user) and returns false.
  def has_companion_user?(user_id)
    DB.query_single("SELECT id FROM users WHERE id = :user_id", user_id: user_id).first.present?
  end

  # Reads a raw site_settings value straight from the DB (SiteSetting is not
  # safe to use inside migrations).
  def fetch_setting(name)
    DB.query_single(
      "SELECT value FROM site_settings WHERE name = :setting_name",
      setting_name: name,
    ).first
  end

  # Maps a Claude model name to its legacy hard-coded bot user id
  # (nil for models that never had one).
  def claude_bot_id(model)
    case model
    when "claude-2"
      -112
    when "claude-3-haiku"
      -119
    when "claude-3-sonnet"
      -118
    when "claude-instant-1"
      nil
    when "claude-3-opus"
      -117
    end
  end

  # Maps a Claude model name to its AWS Bedrock model identifier.
  def mapped_bedrock_model(model)
    case model
    when "claude-2"
      "anthropic.claude-v2:1"
    when "claude-3-haiku"
      "anthropic.claude-3-haiku-20240307-v1:0"
    when "claude-3-sonnet"
      "anthropic.claude-3-sonnet-20240229-v1:0"
    when "claude-instant-1"
      "anthropic.claude-instant-v1"
    when "claude-3-opus"
      "anthropic.claude-3-opus-20240229-v1:0"
    end
  end

  # Human-friendly display name for a Claude model.
  def display_name(model)
    case model
    when "claude-2"
      "Claude 2"
    when "claude-3-haiku"
      "Claude 3 Haiku"
    when "claude-3-sonnet"
      "Claude 3 Sonnet"
    when "claude-instant-1"
      "Claude Instant 1"
    when "claude-3-opus"
      "Claude 3 Opus"
    end
  end

  # Doubles embedded single quotes so interpolated values (URLs, API keys)
  # cannot break out of their SQL string literals.
  def sql_str(value)
    value.to_s.gsub("'", "''")
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

View File

@ -0,0 +1,97 @@
# frozen_string_literal: true
# Seeds llm_models with Gemini and Cohere rows mirroring the legacy
# `ai_gemini_api_key` / `ai_cohere_api_key` site settings.
class SeedOtherPropietaryModels < ActiveRecord::Migration[7.0]
  def up
    models = []

    gemini_key = fetch_setting("ai_gemini_api_key")

    enabled_models = fetch_setting("ai_bot_enabled_chat_bots")&.split("|").to_a

    if gemini_key.present?
      gemini_models = %w[gemini-pro gemini-1.5-pro gemini-1.5-flash]

      gemini_models.each do |gm|
        url = "https://generativelanguage.googleapis.com/v1beta/models/#{gemini_mapped_model(gm)}"

        # Only gemini-1.5-pro ever had a hard-coded bot user (-115).
        bot_user_id = "NULL"
        bot_user_id = -115 if gm == "gemini-1.5-pro" && has_companion_user?(-115)
        enabled = enabled_models.include?(gm)

        # max_prompt_tokens is an integer column; interpolate it unquoted
        # (the original quoted it as a string literal, relying on an implicit cast).
        models << "('#{gm.titleize}', '#{gm}', 'google', 'DiscourseAi::Tokenizer::OpenAiTokenizer', #{gemini_tokens(gm)}, '#{url}', '#{sql_str(gemini_key)}', #{bot_user_id}, #{enabled}, NOW(), NOW())"
      end
    end

    cohere_key = fetch_setting("ai_cohere_api_key")

    if cohere_key.present?
      cohere_models = %w[command-light command command-r command-r-plus]

      cohere_models.each do |cm|
        # Only command-r-plus ever had a hard-coded bot user (-120).
        bot_user_id = "NULL"
        bot_user_id = -120 if cm == "command-r-plus" && has_companion_user?(-120)
        enabled = enabled_models.include?(cm)

        models << "('#{cm.titleize}', '#{cm}', 'cohere', 'DiscourseAi::Tokenizer::OpenAiTokenizer', #{cohere_tokens(cm)}, 'https://api.cohere.ai/v1/chat', '#{sql_str(cohere_key)}', #{bot_user_id}, #{enabled}, NOW(), NOW())"
      end
    end

    if models.present?
      rows = models.compact.join(", ")

      # `rows` is interpolated directly; the previous `rows:` bind param was
      # dead code (the SQL contains no :rows placeholder).
      DB.exec(<<~SQL) if rows.present?
        INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
        VALUES #{rows};
      SQL
    end
  end

  # True when the legacy hard-coded bot user (negative id) already exists.
  def has_companion_user?(user_id)
    DB.query_single("SELECT id FROM users WHERE id = :user_id", user_id: user_id).first.present?
  end

  # Reads a raw site_settings value straight from the DB (SiteSetting is not
  # safe to use inside migrations).
  def fetch_setting(name)
    DB.query_single(
      "SELECT value FROM site_settings WHERE name = :setting_name",
      setting_name: name,
    ).first
  end

  # Prompt-token budget per Cohere model.
  def cohere_tokens(model)
    case model
    when "command-light"
      4096
    when "command"
      8192
    when "command-r"
      131_072
    when "command-r-plus"
      131_072
    else
      8192
    end
  end

  # Maps a Gemini model name to the "-latest" API alias.
  def gemini_mapped_model(model)
    case model
    when "gemini-1.5-pro"
      "gemini-1.5-pro-latest"
    when "gemini-1.5-flash"
      "gemini-1.5-flash-latest"
    else
      "gemini-pro-latest"
    end
  end

  # Prompt-token budget per Gemini model.
  def gemini_tokens(model)
    if model.start_with?("gemini-1.5")
      # technically we support 1 million tokens, but we're being conservative
      800_000
    else
      16_384 # 50% of model tokens
    end
  end

  # Doubles embedded single quotes so interpolated values (API keys)
  # cannot break out of their SQL string literals.
  def sql_str(value)
    value.to_s.gsub("'", "''")
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

View File

@ -0,0 +1,74 @@
# frozen_string_literal: true
# Seeds llm_models with open-source (Mixtral) rows mirroring the legacy
# HuggingFace/vLLM site settings, including the reserved SRV-based vLLM
# endpoint placeholder.
class SeedOssModels < ActiveRecord::Migration[7.0]
  def up
    models = []

    enabled_models = fetch_setting("ai_bot_enabled_chat_bots")&.split("|").to_a
    enabled = enabled_models.include?("mixtral-8x7B-Instruct-V0.1")

    hf_key = fetch_setting("ai_hugging_face_api_key")
    hf_url = fetch_setting("ai_hugging_face_api_url")
    # -114 was the hard-coded Mixtral bot user id.
    user_id = has_companion_user?(-114) ? -114 : "NULL"

    if hf_url.present? && hf_key.present?
      hf_token_limit = fetch_setting("ai_hugging_face_token_limit")
      hf_display_name = fetch_setting("ai_hugging_face_model_display_name")

      name = hf_display_name || "mistralai/Mixtral"
      token_limit = hf_token_limit || 32_000

      models << "('#{sql_str(name)}', '#{sql_str(name)}', 'hugging_face', 'DiscourseAi::Tokenizer::MixtralTokenizer', #{token_limit}, '#{sql_str(hf_url)}', '#{sql_str(hf_key)}', #{user_id}, #{enabled}, NOW(), NOW())"
    end

    vllm_key = fetch_setting("ai_vllm_api_key")
    vllm_url = fetch_setting("ai_vllm_endpoint")

    if vllm_key.present? && vllm_url.present?
      url = "#{vllm_url}/v1/chat/completions"
      name = "mistralai/Mixtral"

      models << "('#{sql_str(name)}', '#{sql_str(name)}', 'vllm', 'DiscourseAi::Tokenizer::MixtralTokenizer', 32000, '#{sql_str(url)}', '#{sql_str(vllm_key)}', #{user_id}, #{enabled}, NOW(), NOW())"
    end

    vllm_srv = fetch_setting("ai_vllm_endpoint_srv")
    srv_reserved_url = "https://vllm.shadowed-by-srv.invalid"
    srv_record =
      DB.query_single(
        "SELECT id FROM llm_models WHERE url = :reserved",
        reserved: srv_reserved_url,
      ).first

    # FIX: was `srv.record.nil?`, which raised NameError — the local variable
    # is `srv_record`.
    if vllm_srv.present? && srv_record.nil?
      name = "mistralai/Mixtral"

      # NOTE(review): `vllm_key` may be nil when only the SRV setting is
      # configured; the row is then seeded with an empty api_key — confirm
      # that is acceptable.
      models << "('vLLM SRV LLM', '#{sql_str(name)}', 'vllm', 'DiscourseAi::Tokenizer::MixtralTokenizer', 32000, '#{srv_reserved_url}', '#{sql_str(vllm_key)}', #{user_id}, #{enabled}, NOW(), NOW())"
    end

    if models.present?
      rows = models.compact.join(", ")

      # `rows` is interpolated directly; the previous `rows:` bind param was
      # dead code (the SQL contains no :rows placeholder).
      DB.exec(<<~SQL) if rows.present?
        INSERT INTO llm_models(display_name, name, provider, tokenizer, max_prompt_tokens, url, api_key, user_id, enabled_chat_bot, created_at, updated_at)
        VALUES #{rows};
      SQL
    end
  end

  # True when the legacy hard-coded bot user (negative id) already exists.
  def has_companion_user?(user_id)
    DB.query_single("SELECT id FROM users WHERE id = :user_id", user_id: user_id).first.present?
  end

  # Reads a raw site_settings value straight from the DB (SiteSetting is not
  # safe to use inside migrations).
  def fetch_setting(name)
    DB.query_single(
      "SELECT value FROM site_settings WHERE name = :setting_name",
      setting_name: name,
    ).first
  end

  # Doubles embedded single quotes so interpolated values (URLs, API keys)
  # cannot break out of their SQL string literals.
  def sql_str(value)
    value.to_s.gsub("'", "''")
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

View File

@ -163,76 +163,11 @@ module DiscourseAi
end
def self.guess_model(bot_user)
# HACK(roman): We'll do this until we define how we represent different providers in the bot settings
guess =
case bot_user.id
when DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID
if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
"aws_bedrock:claude-2"
else
"anthropic:claude-2"
end
when DiscourseAi::AiBot::EntryPoint::GPT4_ID
"open_ai:gpt-4"
when DiscourseAi::AiBot::EntryPoint::GPT4_TURBO_ID
"open_ai:gpt-4-turbo"
when DiscourseAi::AiBot::EntryPoint::GPT4O_ID
"open_ai:gpt-4o"
when DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID
"open_ai:gpt-3.5-turbo-16k"
when DiscourseAi::AiBot::EntryPoint::MIXTRAL_ID
mixtral_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
if DiscourseAi::Completions::Endpoints::Vllm.correctly_configured?(mixtral_model)
"vllm:#{mixtral_model}"
elsif DiscourseAi::Completions::Endpoints::HuggingFace.correctly_configured?(
mixtral_model,
)
"hugging_face:#{mixtral_model}"
else
"ollama:mistral"
end
when DiscourseAi::AiBot::EntryPoint::GEMINI_ID
"google:gemini-1.5-pro"
when DiscourseAi::AiBot::EntryPoint::FAKE_ID
"fake:fake"
when DiscourseAi::AiBot::EntryPoint::CLAUDE_3_OPUS_ID
if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(
"claude-3-opus",
)
"aws_bedrock:claude-3-opus"
else
"anthropic:claude-3-opus"
end
when DiscourseAi::AiBot::EntryPoint::COHERE_COMMAND_R_PLUS
"cohere:command-r-plus"
when DiscourseAi::AiBot::EntryPoint::CLAUDE_3_SONNET_ID
if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(
"claude-3-sonnet",
)
"aws_bedrock:claude-3-sonnet"
else
"anthropic:claude-3-sonnet"
end
when DiscourseAi::AiBot::EntryPoint::CLAUDE_3_HAIKU_ID
if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(
"claude-3-haiku",
)
"aws_bedrock:claude-3-haiku"
else
"anthropic:claude-3-haiku"
end
else
nil
end
associated_llm = LlmModel.find_by(user_id: bot_user.id)
if guess
provider, model_name = guess.split(":")
llm_model = LlmModel.find_by(provider: provider, name: model_name)
return if associated_llm.nil? # Might be a persona user. Handled by constructor.
return "custom:#{llm_model.id}" if llm_model
end
guess
"custom:#{associated_llm.id}"
end
def build_placeholder(summary, details, custom_raw: nil)

View File

@ -6,82 +6,45 @@ module DiscourseAi
class EntryPoint
REQUIRE_TITLE_UPDATE = "discourse-ai-title-update"
GPT4_ID = -110
GPT3_5_TURBO_ID = -111
CLAUDE_V2_ID = -112
GPT4_TURBO_ID = -113
MIXTRAL_ID = -114
GEMINI_ID = -115
FAKE_ID = -116 # only used for dev and test
CLAUDE_3_OPUS_ID = -117
CLAUDE_3_SONNET_ID = -118
CLAUDE_3_HAIKU_ID = -119
COHERE_COMMAND_R_PLUS = -120
GPT4O_ID = -121
BOTS = [
[GPT4_ID, "gpt4_bot", "gpt-4"],
[GPT3_5_TURBO_ID, "gpt3.5_bot", "gpt-3.5-turbo"],
[CLAUDE_V2_ID, "claude_bot", "claude-2"],
[GPT4_TURBO_ID, "gpt4t_bot", "gpt-4-turbo"],
[MIXTRAL_ID, "mixtral_bot", "mixtral-8x7B-Instruct-V0.1"],
[GEMINI_ID, "gemini_bot", "gemini-1.5-pro"],
[FAKE_ID, "fake_bot", "fake"],
[CLAUDE_3_OPUS_ID, "claude_3_opus_bot", "claude-3-opus"],
[CLAUDE_3_SONNET_ID, "claude_3_sonnet_bot", "claude-3-sonnet"],
[CLAUDE_3_HAIKU_ID, "claude_3_haiku_bot", "claude-3-haiku"],
[COHERE_COMMAND_R_PLUS, "cohere_command_bot", "cohere-command-r-plus"],
[GPT4O_ID, "gpt4o_bot", "gpt-4o"],
]
BOT_USER_IDS = BOTS.map(&:first)
Bot = Struct.new(:id, :name, :llm)
def self.all_bot_ids
BOT_USER_IDS.concat(AiPersona.mentionables.map { |mentionable| mentionable[:user_id] })
mentionable_persona_user_ids =
AiPersona.mentionables.map { |mentionable| mentionable[:user_id] }
mentionable_bot_users = LlmModel.joins(:user).pluck("users.id")
mentionable_bot_users + mentionable_persona_user_ids
end
def self.find_bot_by_id(id)
found = DiscourseAi::AiBot::EntryPoint::BOTS.find { |bot| bot[0] == id }
return if !found
Bot.new(found[0], found[1], found[2])
def self.find_participant_in(participant_ids)
model = LlmModel.includes(:user).where(user_id: participant_ids).last
return if model.nil?
bot_user = model.user
Bot.new(bot_user.id, bot_user.username_lower, model.name)
end
def self.map_bot_model_to_user_id(model_name)
case model_name
in "gpt-4o"
GPT4O_ID
in "gpt-4-turbo"
GPT4_TURBO_ID
in "gpt-3.5-turbo"
GPT3_5_TURBO_ID
in "gpt-4"
GPT4_ID
in "claude-2"
CLAUDE_V2_ID
in "mixtral-8x7B-Instruct-V0.1"
MIXTRAL_ID
in "gemini-1.5-pro"
GEMINI_ID
in "fake"
FAKE_ID
in "claude-3-opus"
CLAUDE_3_OPUS_ID
in "claude-3-sonnet"
CLAUDE_3_SONNET_ID
in "claude-3-haiku"
CLAUDE_3_HAIKU_ID
in "cohere-command-r-plus"
COHERE_COMMAND_R_PLUS
else
nil
end
def self.find_user_from_model(model_name)
# Hack(Roman): Added this because Command R Plus had a different name in the bot settings.
# Will eventually amend it with a data migration.
name = model_name
name = "command-r-plus" if name == "cohere-command-r-plus"
LlmModel.joins(:user).where(name: name).last&.user
end
def self.enabled_user_ids_and_models_map
DB.query_hash(<<~SQL)
SELECT users.username AS username, users.id AS id, llms.name AS model_name, llms.display_name AS display_name
FROM llm_models llms
INNER JOIN users ON llms.user_id = users.id
WHERE llms.enabled_chat_bot
SQL
end
# Most errors are simply "not_allowed"
# we do not want to reveal information about this sytem
# we do not want to reveal information about this system
# the 2 exceptions are "other_people_in_pm" and "other_content_in_pm"
# in both cases you have access to the PM so we are not revealing anything
def self.ai_share_error(topic, guardian)
@ -113,8 +76,7 @@ module DiscourseAi
end
plugin.on(:site_setting_changed) do |name, _old_value, _new_value|
if name == :ai_bot_enabled_chat_bots || name == :ai_bot_enabled ||
name == :discourse_ai_enabled
if name == :ai_bot_enabled || name == :discourse_ai_enabled
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
end
end
@ -170,35 +132,23 @@ module DiscourseAi
scope.user.in_any_groups?(SiteSetting.ai_bot_allowed_groups_map)
end,
) do
model_map = {}
SiteSetting
.ai_bot_enabled_chat_bots
.split("|")
.each do |bot_name|
model_map[
::DiscourseAi::AiBot::EntryPoint.map_bot_model_to_user_id(bot_name)
] = bot_name
end
bots_map = ::DiscourseAi::AiBot::EntryPoint.enabled_user_ids_and_models_map
# not 100% ideal, cause it is one extra query, but we need it
bots = DB.query_hash(<<~SQL, user_ids: model_map.keys)
SELECT username, id FROM users WHERE id IN (:user_ids)
SQL
bots.each { |hash| hash["model_name"] = model_map[hash["id"]] }
persona_users = AiPersona.persona_users(user: scope.user)
if persona_users.present?
bots.concat(
bots_map.concat(
persona_users.map do |persona_user|
{
"id" => persona_user[:user_id],
"username" => persona_user[:username],
"mentionable" => persona_user[:mentionable],
"is_persona" => true,
}
end,
)
end
bots
bots_map
end
plugin.add_to_serializer(:current_user, :can_use_assistant) do

View File

@ -59,7 +59,7 @@ module DiscourseAi
def self.schedule_reply(post)
return if is_bot_user_id?(post.user_id)
bot_ids = DiscourseAi::AiBot::EntryPoint::BOT_USER_IDS
bot_ids = LlmModel.joins(:user).pluck("users.id")
mentionables = AiPersona.mentionables(user: post.user)
bot_user = nil
@ -491,22 +491,20 @@ module DiscourseAi
def available_bot_usernames
@bot_usernames ||=
AiPersona
.joins(:user)
.pluck(:username)
.concat(DiscourseAi::AiBot::EntryPoint::BOTS.map(&:second))
AiPersona.joins(:user).pluck(:username).concat(available_bot_users.map(&:username))
end
def available_bot_user_ids
@bot_ids ||=
AiPersona
.joins(:user)
.pluck("users.id")
.concat(DiscourseAi::AiBot::EntryPoint::BOTS.map(&:first))
@bot_ids ||= AiPersona.joins(:user).pluck("users.id").concat(available_bot_users.map(&:id))
end
private
def available_bot_users
@available_bots ||=
User.joins("INNER JOIN llm_models llm ON llm.user_id = users.id").where(active: true)
end
def publish_final_update(reply_post)
return if @published_final_update
if reply_post

View File

@ -2,44 +2,6 @@
module DiscourseAi::AiBot::SiteSettingsExtension
def self.enable_or_disable_ai_bots
enabled_bots = SiteSetting.ai_bot_enabled_chat_bots_map
enabled_bots = [] if !SiteSetting.ai_bot_enabled
DiscourseAi::AiBot::EntryPoint::BOTS.each do |id, bot_name, name|
if id == DiscourseAi::AiBot::EntryPoint::FAKE_ID
next if Rails.env.production?
end
active = enabled_bots.include?(name)
user = User.find_by(id: id)
if active
if !user
user =
User.new(
id: id,
email: "no_email_#{name}",
name: bot_name.titleize,
username: UserNameSuggester.suggest(bot_name),
active: true,
approved: true,
admin: true,
moderator: true,
trust_level: TrustLevel[4],
)
user.save!(validate: false)
else
user.update_columns(active: true)
end
elsif !active && user
# will include deleted
has_posts = DB.query_single("SELECT 1 FROM posts WHERE user_id = #{id} LIMIT 1").present?
if has_posts
user.update_columns(active: false) if user.active
else
user.destroy
end
end
end
LlmModel.find_each { |llm_model| llm_model.toggle_companion_user }
end
end

View File

@ -44,7 +44,9 @@ module DiscourseAi
private
def model_uri
return URI(llm_model.url) if llm_model&.url
if llm_model&.url && !llm_model&.url == LlmModel::RESERVED_VLLM_SRV_URL
return URI(llm_model.url)
end
service = DiscourseAi::Utils::DnsSrv.lookup(SiteSetting.ai_vllm_endpoint_srv)
if service.present?

View File

@ -135,7 +135,7 @@ module DiscourseAi
dialect_klass = DiscourseAi::Completions::Dialects::Dialect.dialect_for(model_name)
if @canned_response
if @canned_llm && @canned_llm != model_name
if @canned_llm && @canned_llm != [provider_name, model_name].join(":")
raise "Invalid call LLM call, expected #{@canned_llm} but got #{model_name}"
end

View File

@ -27,6 +27,22 @@ module DiscourseAi
llm_models
end
end
# Names of the LLMs that may be enabled as AI chat bots.
#
# This is a hardcoded allow-list; a model fabricated with any other
# name will not be surfaced as a selectable bot.
#
# @return [Array<String>] a fresh array of supported model names
def self.available_ai_bots
  [
    "gpt-3.5-turbo",
    "gpt-4",
    "gpt-4-turbo",
    "gpt-4o",
    "claude-2",
    "gemini-1.5-pro",
    "mixtral-8x7B-Instruct-V0.1",
    "claude-3-opus",
    "claude-3-sonnet",
    "claude-3-haiku",
    "cohere-command-r-plus",
  ]
end
end
end
end

View File

@ -43,6 +43,8 @@ after_initialize do
add_admin_route("discourse_ai.title", "discourse-ai", { use_new_show_route: true })
LlmModel.enable_or_disable_srv_llm!
[
DiscourseAi::Embeddings::EntryPoint.new,
DiscourseAi::Nsfw::EntryPoint.new,

View File

@ -3,13 +3,15 @@
RSpec.describe Jobs::SharedConversationAdjustUploadSecurity do
let(:params) { {} }
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
fab!(:bot_user) do
SiteSetting.discourse_ai_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "claude-2"
toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_allowed_groups = "10"
SiteSetting.ai_bot_public_sharing_allowed_groups = "10"
User.find(DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)
claude_2.reload.user
end
fab!(:user)
fab!(:topic) { Fabricate(:private_message_topic, user: user, recipient: bot_user) }

View File

@ -4,13 +4,15 @@ RSpec.describe DiscourseAi::AiBot::Bot do
subject(:bot) { described_class.as(bot_user) }
fab!(:admin)
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
fab!(:fake) { Fabricate(:llm_model, name: "fake", provider: "fake") }
before do
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
toggle_enabled_bots(bots: [gpt_4])
SiteSetting.ai_bot_enabled = true
end
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT4_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-4") }
let!(:user) { Fabricate(:user) }
@ -33,11 +35,10 @@ RSpec.describe DiscourseAi::AiBot::Bot do
DiscourseAi::Completions::Endpoints::Fake.delays = []
DiscourseAi::Completions::Endpoints::Fake.last_call = nil
SiteSetting.ai_bot_enabled_chat_bots = "fake"
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [fake])
Group.refresh_automatic_groups!
bot_user = User.find(DiscourseAi::AiBot::EntryPoint::FAKE_ID)
bot_user = DiscourseAi::AiBot::EntryPoint.find_user_from_model("fake")
AiPersona.create!(
name: "TestPersona",
top_p: 0.5,

View File

@ -4,9 +4,13 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
describe "#inject_into" do
describe "subscribes to the post_created event" do
fab!(:admin)
let(:gpt_bot) { User.find(described_class::GPT4_ID) }
fab!(:bot_allowed_group) { Fabricate(:group) }
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
let(:gpt_bot) { gpt_4.reload.user }
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
let(:post_args) do
{
title: "Dear AI, I want to ask a question",
@ -17,7 +21,7 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
end
before do
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4|claude-2"
toggle_enabled_bots(bots: [gpt_4, claude_2])
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_allowed_groups = bot_allowed_group.id
bot_allowed_group.add(admin)
@ -104,7 +108,7 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
end
it "includes the bot's user_id" do
claude_bot = User.find(described_class::CLAUDE_V2_ID)
claude_bot = DiscourseAi::AiBot::EntryPoint.find_user_from_model("claude-2")
claude_post_attrs = post_args.merge(target_usernames: [claude_bot.username].join(","))
expect { PostCreator.create!(admin, claude_post_attrs) }.to change(

View File

@ -1,7 +1,11 @@
# frozen_string_literal: true
RSpec.describe Jobs::CreateAiReply do
before { SiteSetting.ai_bot_enabled = true }
fab!(:gpt_35_bot) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
before do
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_bot])
end
describe "#execute" do
fab!(:topic)
@ -15,10 +19,12 @@ RSpec.describe Jobs::CreateAiReply do
it "adds a reply from the bot" do
persona_id = AiPersona.find_by(name: "Forum Helper").id
bot_user = DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo")
DiscourseAi::Completions::Llm.with_prepared_responses([expected_response]) do
subject.execute(
post_id: topic.first_post.id,
bot_user_id: DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID,
bot_user_id: bot_user.id,
persona_id: persona_id,
)
end

View File

@ -2,10 +2,12 @@
RSpec.describe Jobs::UpdateAiBotPmTitle do
let(:user) { Fabricate(:admin) }
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID) }
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("claude-2") }
before do
SiteSetting.ai_bot_enabled_chat_bots = "claude-2"
toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true
end

View File

@ -3,10 +3,12 @@
RSpec.describe DiscourseAi::AiBot::Playground do
subject(:playground) { described_class.new(bot) }
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
fab!(:bot_user) do
SiteSetting.ai_bot_enabled_chat_bots = "claude-2"
toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true
User.find(DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)
claude_2.reload.user
end
fab!(:bot) do
@ -409,7 +411,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
it "allows mentioning a persona" do
# we still should be able to mention with no bots
SiteSetting.ai_bot_enabled_chat_bots = ""
toggle_enabled_bots(bots: [])
post = nil
DiscourseAi::Completions::Llm.with_prepared_responses(["Yes I can"]) do
@ -428,7 +430,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
it "allows PMing a persona even when no particular bots are enabled" do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = ""
toggle_enabled_bots(bots: [])
post = nil
DiscourseAi::Completions::Llm.with_prepared_responses(
@ -456,17 +458,20 @@ RSpec.describe DiscourseAi::AiBot::Playground do
end
it "picks the correct llm for persona in PMs" do
gpt_35_turbo = Fabricate(:llm_model, name: "gpt-3.5-turbo")
gpt_35_turbo_16k = Fabricate(:llm_model, name: "gpt-3.5-turbo-16k")
# If you start a PM with GPT 3.5 bot, replies should come from it, not from Claude
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-3.5-turbo|claude-2"
toggle_enabled_bots(bots: [gpt_35_turbo, claude_2])
post = nil
gpt3_5_bot_user = User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID)
gpt3_5_bot_user = gpt_35_turbo.reload.user
# title is queued first, ensures it uses the llm targeted via target_usernames not claude
DiscourseAi::Completions::Llm.with_prepared_responses(
["Magic title", "Yes I can"],
llm: "open_ai:gpt-3.5-turbo-16k",
llm: "open_ai:gpt-3.5-turbo",
) do
post =
create_post(
@ -498,7 +503,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
# replies as correct persona if replying direct to persona
DiscourseAi::Completions::Llm.with_prepared_responses(
["Another reply"],
llm: "open_ai:gpt-3.5-turbo-16k",
llm: "open_ai:gpt-3.5-turbo",
) do
create_post(
raw: "Please ignore this bot, I am replying to a user",

View File

@ -1,38 +1,62 @@
#frozen_string_literal: true
describe DiscourseAi::AiBot::SiteSettingsExtension do
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
fab!(:gpt_35_turbo) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
# Spec helper: true when a companion bot user currently exists for the
# LLM identified by +model+ (its name, e.g. "gpt-4").
# NOTE(review): relies on EntryPoint.find_user_from_model returning nil
# (not raising) when no bot user is present — confirm against EntryPoint.
def user_exists?(model)
  DiscourseAi::AiBot::EntryPoint.find_user_from_model(model).present?
end
before { SiteSetting.discourse_ai_enabled = true }
it "correctly creates/deletes bot accounts as needed" do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
gpt_4.update!(enabled_chat_bot: true)
claude_2.update!(enabled_chat_bot: false)
gpt_35_turbo.update!(enabled_chat_bot: false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT4_ID)).to eq(true)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID)).to eq(false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)).to eq(false)
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
SiteSetting.ai_bot_enabled_chat_bots = "gpt-3.5-turbo"
expect(user_exists?("gpt-4")).to eq(true)
expect(user_exists?("gpt-3.5-turbo")).to eq(false)
expect(user_exists?("claude-2")).to eq(false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT4_ID)).to eq(false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID)).to eq(true)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)).to eq(false)
gpt_4.update!(enabled_chat_bot: false)
claude_2.update!(enabled_chat_bot: false)
gpt_35_turbo.update!(enabled_chat_bot: true)
SiteSetting.ai_bot_enabled_chat_bots = "gpt-3.5-turbo|claude-2"
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT4_ID)).to eq(false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID)).to eq(true)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)).to eq(true)
expect(user_exists?("gpt-4")).to eq(false)
expect(user_exists?("gpt-3.5-turbo")).to eq(true)
expect(user_exists?("claude-2")).to eq(false)
gpt_4.update!(enabled_chat_bot: false)
claude_2.update!(enabled_chat_bot: true)
gpt_35_turbo.update!(enabled_chat_bot: true)
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
expect(user_exists?("gpt-4")).to eq(false)
expect(user_exists?("gpt-3.5-turbo")).to eq(true)
expect(user_exists?("claude-2")).to eq(true)
SiteSetting.ai_bot_enabled = false
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT4_ID)).to eq(false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID)).to eq(false)
expect(User.exists?(id: DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)).to eq(false)
expect(user_exists?("gpt-4")).to eq(false)
expect(user_exists?("gpt-3.5-turbo")).to eq(false)
expect(user_exists?("claude-2")).to eq(false)
end
it "leaves accounts around if they have any posts" do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
gpt_4.update!(enabled_chat_bot: true)
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
user = User.find(DiscourseAi::AiBot::EntryPoint::GPT4_ID)
user = DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-4")
create_post(user: user, raw: "this is a test post")

View File

@ -3,7 +3,15 @@
RSpec.describe DiscourseAi::AiBot::Tools::DallE do
let(:prompts) { ["a pink cow", "a red cow"] }
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
fab!(:gpt_35_turbo) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
before do
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_turbo])
SiteSetting.ai_openai_api_key = "abc"
end
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }
@ -15,11 +23,6 @@ RSpec.describe DiscourseAi::AiBot::Tools::DallE do
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg=="
end
before do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_openai_api_key = "abc"
end
describe "#process" do
it "can generate tall images" do
generator =

View File

@ -1,7 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::DbSchema do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
before { SiteSetting.ai_bot_enabled = true }

View File

@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::AiBot::Tools::DiscourseMetaSearch do
SiteSetting.ai_openai_api_key = "asd"
end
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }

View File

@ -1,7 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::Google do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }
let(:search) { described_class.new({ query: "some search term" }, bot_user: bot_user, llm: llm) }

View File

@ -1,11 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::Image do
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:prompts) { ["a pink cow", "a red cow"] }
let(:tool) do
@ -18,7 +14,15 @@ RSpec.describe DiscourseAi::AiBot::Tools::Image do
)
end
before { SiteSetting.ai_bot_enabled = true }
fab!(:gpt_35_turbo) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
before do
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_turbo])
end
let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
describe "#process" do
it "can generate correct info" do

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::JavascriptEvaluator do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::ListCategories do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
before { SiteSetting.ai_bot_enabled = true }

View File

@ -1,7 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::ListTags do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
before do

View File

@ -1,7 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::Read do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:tool) { described_class.new({ topic_id: topic_with_tags.id }, bot_user: bot_user, llm: llm) }

View File

@ -1,10 +1,14 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::SearchSettings do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
fab!(:gpt_35_bot) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
before { SiteSetting.ai_bot_enabled = true }
before do
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_bot])
end
def search_settings(query)
described_class.new({ query: query }, bot_user: bot_user, llm: llm)
@ -25,8 +29,7 @@ RSpec.describe DiscourseAi::AiBot::Tools::SearchSettings do
end
it "can return descriptions if there are few matches" do
results =
search_settings("this will not be found!@,default_locale,ai_bot_enabled_chat_bots").invoke
results = search_settings("this will not be found!@,default_locale,ai_bot_enabled").invoke
expect(results[:rows].length).to eq(2)

View File

@ -6,7 +6,7 @@ RSpec.describe DiscourseAi::AiBot::Tools::Search do
before { SiteSetting.ai_openai_api_key = "asd" }
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }

View File

@ -9,7 +9,7 @@ def has_rg?
end
RSpec.describe DiscourseAi::AiBot::Tools::SettingContext, if: has_rg? do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
before { SiteSetting.ai_bot_enabled = true }

View File

@ -1,7 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::Summarize do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
let(:progress_blk) { Proc.new {} }

View File

@ -1,7 +1,7 @@
#frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::Time do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
before { SiteSetting.ai_bot_enabled = true }

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::Tools::WebBrowser do
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-4-turbo") }
before do

View File

@ -3,10 +3,12 @@
require "rails_helper"
RSpec.describe SharedAiConversation, type: :model do
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
before do
SiteSetting.discourse_ai_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "claude-2"
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [claude_2])
end
fab!(:user)
@ -19,7 +21,7 @@ RSpec.describe SharedAiConversation, type: :model do
<p>This is some other text</p>
HTML
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID) }
let(:bot_user) { claude_2.reload.user }
let!(:topic) { Fabricate(:private_message_topic, recipient: bot_user) }
let!(:post1) { Fabricate(:post, topic: topic, post_number: 1) }
let!(:post2) { Fabricate(:post, topic: topic, post_number: 2, raw: raw_with_details) }

11
spec/plugin_helper.rb Normal file
View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
# Spec helper mixin for controlling which LLM-backed chat bots are active.
module DiscourseAi::ChatBotHelper
  # Enables exactly the given LlmModel records as chat bots (all others
  # are disabled), then syncs their companion bot user accounts.
  #
  # @param bots [Array<LlmModel>] models to enable; empty disables all bots
  def toggle_enabled_bots(bots: [])
    LlmModel.update_all(enabled_chat_bot: false)
    bots.each { |bot| bot.update!(enabled_chat_bot: true) }
    DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
  end
end
RSpec.configure { |config| config.include(DiscourseAi::ChatBotHelper) }

View File

@ -76,11 +76,15 @@ RSpec.describe DiscourseAi::AiBot::BotController do
describe "#show_bot_username" do
it "returns the username_lower of the selected bot" do
SiteSetting.ai_bot_enabled = true
gpt_3_5_bot = "gpt-3.5-turbo"
expected_username = User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID).username_lower
gpt_35_bot = Fabricate(:llm_model, name: "gpt-3.5-turbo")
get "/discourse-ai/ai-bot/bot-username", params: { username: gpt_3_5_bot }
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_bot])
expected_username =
DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo").username_lower
get "/discourse-ai/ai-bot/bot-username", params: { username: gpt_35_bot.name }
expect(response.status).to eq(200)
expect(response.parsed_body["bot_username"]).to eq(expected_username)

View File

@ -5,12 +5,14 @@ require "rails_helper"
RSpec.describe DiscourseAi::AiBot::SharedAiConversationsController do
before do
SiteSetting.discourse_ai_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "claude-2"
toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_allowed_groups = "10"
SiteSetting.ai_bot_public_sharing_allowed_groups = "10"
end
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
fab!(:user) { Fabricate(:user, refresh_auto_groups: true) }
fab!(:topic)
fab!(:pm) { Fabricate(:private_message_topic) }
@ -18,11 +20,11 @@ RSpec.describe DiscourseAi::AiBot::SharedAiConversationsController do
fab!(:bot_user) do
SiteSetting.discourse_ai_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "claude-2"
toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_allowed_groups = "10"
SiteSetting.ai_bot_public_sharing_allowed_groups = "10"
User.find(DiscourseAi::AiBot::EntryPoint::CLAUDE_V2_ID)
claude_2.reload.user
end
fab!(:user_pm_share) do

View File

@ -3,9 +3,12 @@ RSpec.describe "AI chat channel summarization", type: :system, js: true do
fab!(:user)
fab!(:group) { Fabricate(:group, visibility_level: Group.visibility_levels[:staff]) }
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
fab!(:gpt_3_5_turbo) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
before do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4|gpt-3.5-turbo"
toggle_enabled_bots(bots: [gpt_4, gpt_3_5_turbo])
SiteSetting.ai_bot_allowed_groups = group.id.to_s
sign_in(user)
end

View File

@ -2,7 +2,15 @@
RSpec.describe "Share conversation via link", type: :system do
fab!(:admin) { Fabricate(:admin, username: "ai_sharer") }
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT4_ID) }
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
before do
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_4])
SiteSetting.ai_bot_public_sharing_allowed_groups = "1" # admin
Group.refresh_automatic_groups!
sign_in(admin)
end
let(:pm) do
Fabricate(
@ -15,14 +23,6 @@ RSpec.describe "Share conversation via link", type: :system do
let!(:op) { Fabricate(:post, topic: pm, user: admin, raw: "test test test user reply") }
before do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
SiteSetting.ai_bot_public_sharing_allowed_groups = "1" # admin
Group.refresh_automatic_groups!
sign_in(admin)
end
it "does not show share button for my own PMs without bot" do
visit(pm.url)
expect(Guardian.new(admin).can_share_ai_bot_conversation?(pm)).to eq(false)

View File

@ -1,10 +1,11 @@
# frozen_string_literal: true
RSpec.describe "AI personas", type: :system, js: true do
fab!(:admin)
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
before do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
toggle_enabled_bots(bots: [gpt_4])
sign_in(admin)
Group.refresh_automatic_groups!

View File

@ -1,7 +1,8 @@
# frozen_string_literal: true
RSpec.describe "Share conversation", type: :system do
fab!(:admin) { Fabricate(:admin, username: "ai_sharer") }
let(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT4_ID) }
fab!(:gpt_4) { Fabricate(:llm_model, name: "gpt-4") }
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-4") }
let(:pm) do
Fabricate(
@ -31,7 +32,7 @@ RSpec.describe "Share conversation", type: :system do
before do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
toggle_enabled_bots(bots: [gpt_4])
sign_in(admin)
bot_user.update!(username: "gpt-4")