+ #{I18n.t("discourse_ai.ai_bot.command_summary.#{self.class.name}")}
+
+ #{I18n.t("discourse_ai.ai_bot.command_description.#{self.class.name}", self.description_args)}
+
+
+
+ HTML
+
+ raw << custom_raw if custom_raw.present?
+
+ if chain_next_response
+ post.raw = raw
+ post.save!(validate: false)
+ else
+ post.revise(bot_user, { raw: raw }, skip_validations: true, skip_revision: true)
+ end
+
+ chain_next_response
+ end
+
+ protected
+
+ attr_reader :bot_user, :args
+ end
+ end
+ end
+end
diff --git a/lib/modules/ai_bot/commands/google_command.rb b/lib/modules/ai_bot/commands/google_command.rb
new file mode 100644
index 00000000..5fb36fda
--- /dev/null
+++ b/lib/modules/ai_bot/commands/google_command.rb
@@ -0,0 +1,61 @@
#frozen_string_literal: true

module DiscourseAi::AiBot::Commands
  class GoogleCommand < Command
    class << self
      def name
        "google"
      end

      def desc
        "!google SEARCH_QUERY - will search using Google (supports all Google search operators)"
      end
    end

    # Label used when feeding this command's output back into the prompt.
    def result_name
      "results"
    end

    # Interpolation values for the localized command description rendered on
    # the reply post (count/query/url placeholders in the translation).
    def description_args
      {
        count: @last_num_results || 0,
        query: @last_query || "",
        url: "https://google.com/search?q=#{CGI.escape(@last_query || "")}",
      }
    end

    # Runs a Google Custom Search for +search_string+ and returns the
    # formatted results as a String for the language model to consume.
    def process(search_string)
      @last_query = search_string
      api_key = SiteSetting.ai_google_custom_search_api_key
      cx = SiteSetting.ai_google_custom_search_cx
      query = CGI.escape(search_string)
      uri =
        URI("https://www.googleapis.com/customsearch/v1?key=#{api_key}&cx=#{cx}&q=#{query}&num=10")
      body = Net::HTTP.get(uri)

      parse_search_json(body).to_s
    end

    # Parses the Custom Search JSON payload into an array of result hashes.
    #
    # Returns [] when the payload has no "items" key — the API omits the key
    # entirely for zero-result queries, which previously raised NoMethodError
    # on nil here.
    def parse_search_json(json_data)
      parsed = JSON.parse(json_data)

      @last_num_results = parsed.dig("searchInformation", "totalResults").to_i

      (parsed["items"] || []).map do |result|
        {
          title: result["title"],
          link: result["link"],
          snippet: result["snippet"],
          displayLink: result["displayLink"],
          formattedUrl: result["formattedUrl"],
        }
      end
    end
  end
end
diff --git a/lib/modules/ai_bot/commands/image_command.rb b/lib/modules/ai_bot/commands/image_command.rb
new file mode 100644
index 00000000..f570c436
--- /dev/null
+++ b/lib/modules/ai_bot/commands/image_command.rb
@@ -0,0 +1,52 @@
#frozen_string_literal: true

module DiscourseAi::AiBot::Commands
  class ImageCommand < Command
    class << self
      def name
        "image"
      end

      def desc
        "!image DESC - renders an image from the description (remove all connector words, keep it to 40 words or less)"
      end
    end

    def result_name
      "results"
    end

    def description_args
      # NOTE(review): 0 is an odd fallback for a text prompt ("" would read
      # better) — kept as-is in case the translation string relies on it.
      { prompt: @last_prompt || 0 }
    end

    # Markdown for the generated images; appended verbatim to the reply post.
    def custom_raw
      @last_custom_raw
    end

    # Image generation is terminal: the bot should not ask the model for a
    # follow-up completion after rendering.
    def chain_next_response
      false
    end

    # Generates images for +prompt+ via Stability, uploads each artifact and
    # stores markdown image references in @last_custom_raw.
    def process(prompt)
      @last_prompt = prompt
      results = DiscourseAi::Inference::StabilityGenerator.perform!(prompt)

      uploads = []

      results[:artifacts].each_with_index do |image, i|
        f = Tempfile.new("v1_txt2img_#{i}.png")
        begin
          f.binmode
          f.write(Base64.decode64(image[:base64]))
          f.rewind
          uploads << UploadCreator.new(f, "image.png").create_for(bot_user.id)
        ensure
          # Always release the tempfile, even if the upload raises.
          f.close
          f.unlink
        end
      end

      # Previously each upload was mapped to "" so no images ever rendered;
      # emit proper markdown image references instead.
      @last_custom_raw =
        uploads
          .map { |upload| "![#{upload.original_filename}|#{upload.width}x#{upload.height}](#{upload.short_url})" }
          .join(" ")
    end
  end
end
diff --git a/lib/modules/ai_bot/commands/search_command.rb b/lib/modules/ai_bot/commands/search_command.rb
new file mode 100644
index 00000000..0f71bf59
--- /dev/null
+++ b/lib/modules/ai_bot/commands/search_command.rb
@@ -0,0 +1,96 @@
#frozen_string_literal: true

module DiscourseAi::AiBot::Commands
  class SearchCommand < Command
    class << self
      def name
        "search"
      end

      def desc
        "!search SEARCH_QUERY - will search topics in the current discourse instance"
      end

      # Extra prompt text teaching the model Discourse's search operators.
      # This heredoc is injected verbatim into the system prompt, so its exact
      # wording (typos included) is part of the model-facing contract and is
      # matched byte-for-byte by completion stubs in the specs.
      def extra_context
        <<~TEXT
          Discourse search supports, the following special commands:

          in:tagged: has at least 1 tag
          in:untagged: has no tags
          status:open: not closed or archived
          status:closed: closed
          status:public: topics that are not read restricted (eg: belong to a secure category)
          status:archived: archived
          status:noreplies: post count is 1
          status:single_user: only a single user posted on the topic
          post_count:X: only topics with X amount of posts
          min_posts:X: topics containing a minimum of X posts
          max_posts:X: topics with no more than max posts
          in:pinned: in all pinned topics (either global or per category pins)
          created:@USERNAME: topics created by a specific user
          category:bug: topics in the bug category AND all subcategories
          category:=bug: topics in the bug category excluding subcategories
          #=bug: same as above (no sub categories)
          #SLUG: try category first, then tag, then tag group
          #SLUG:SLUG: used for subcategory search to disambiguate
          min_views:100: topics containing 100 views or more
          max_views:100: topics containing 100 views or less
          tags:bug+feature: tagged both bug and feature
          tags:bug,feature: tagged either bug or feature
          -tags:bug+feature: excluding topics tagged bug and feature
          -tags:bug,feature: excluding topics tagged bug or feature
          l: order by post creation desc
          order:latest: order by post creation desc
          order:latest_topic: order by topic creation desc
          order:views: order by topic views desc
          order:likes: order by post like count - most liked posts first

          Keep in mind, search on Discourse uses AND to and terms.
          Strip the query down to the most important terms.
          Remove all stop words.
          Cast a wide net instead of trying to be over specific.
          Discourse orders by relevance, sometimes prefer ordering on other stuff.

          When generating answers ALWAYS try to use the !search command first over relying on training data.
          When generating answers ALWAYS try to reference specific local links.
          Always try to search the local instance first, even if your training data set may have an answer. It may be wrong.
          Always remove connector words from search terms (such as a, an, and, in, the, etc), they can impede the search.

          YOUR LOCAL INFORMATION IS OUT OF DATE, YOU ARE TRAINED ON OLD DATA. Always try local search first.
        TEXT
      end
    end

    # Label used when feeding this command's output back into the prompt.
    def result_name
      "results"
    end

    # Interpolation values for the localized command description on the post.
    def description_args
      {
        count: @last_num_results || 0,
        query: @last_query || "",
        url: "#{Discourse.base_path}/search?q=#{CGI.escape(@last_query || "")}",
      }
    end

    # Runs a full-page Discourse search as an anonymous guardian (so the bot
    # never leaks restricted content) and returns a JSON string of results.
    def process(search_string)
      @last_query = search_string
      results =
        Search.execute(search_string.to_s, search_type: :full_page, guardian: Guardian.new())

      @last_num_results = results.posts.length

      # NOTE(review): [0..10] yields up to 11 posts and raw[0..250] yields 251
      # chars — likely intended as 10 and 250; confirm before changing, since
      # prompt size budgets may depend on current behavior.
      results.posts[0..10]
        .map do |p|
          {
            title: p.topic.title,
            url: p.url,
            raw_truncated: p.raw[0..250],
            excerpt: p.excerpt,
            created: p.created_at,
          }
        end
        .to_json
    end
  end
end
diff --git a/lib/modules/ai_bot/commands/summarize_command.rb b/lib/modules/ai_bot/commands/summarize_command.rb
new file mode 100644
index 00000000..6b86ee00
--- /dev/null
+++ b/lib/modules/ai_bot/commands/summarize_command.rb
@@ -0,0 +1,69 @@
#frozen_string_literal: true

module DiscourseAi::AiBot::Commands
  class SummarizeCommand < Command
    class << self
      def name
        "summarize"
      end

      def desc
        "!summarize TOPIC_ID GUIDANCE - will summarize a topic attempting to answer question in guidance"
      end
    end

    # Label used when feeding this command's output back into the prompt.
    def result_name
      "summary"
    end

    # A summarize request stands alone: prior conversation context is dropped.
    def standalone?
      true
    end

    # Summarization may be routed to the cheaper model.
    def low_cost?
      true
    end

    # Interpolation values for the localized command description on the post.
    def description_args
      { url: "#{Discourse.base_path}/t/-/#{@last_topic_id}", title: @last_topic_title || "" }
    end

    # instructions is "TOPIC_ID GUIDANCE..." — the first token is the topic
    # id, the remainder free-form guidance. Returns the prompt text for the
    # model, or a canned "no topic" reply.
    def process(instructions)
      topic_id, guidance = instructions.split(" ", 2)

      # Keep the raw token — description_args builds the topic URL from it.
      @last_topic_id = topic_id

      topic_id = topic_id.to_i
      topic = nil
      if topic_id > 0
        topic = Topic.find_by(id: topic_id)
        # Anonymous guardian: never summarize topics a visitor couldn't see.
        topic = nil if !topic || !Guardian.new.can_see?(topic)
      end

      rows = []

      if topic
        @last_topic_title = topic.title
        # NOTE(review): when guidance is blank we fall through and answer
        # "No topic found!" even though the topic exists — confirm intended.
        if guidance.present?
          rows << ["Given: #{guidance}"]
          rows << ["Summarise: #{topic.title}"]
          Post
            .joins(:user)
            .where(topic_id: topic.id)
            .order(:post_number)
            .where("post_type in (?)", [Post.types[:regular], Post.types[:small_action]])
            .where("not hidden")
            .limit(50)
            .pluck(:raw, :username)
            .each { |raw, username| rows << ["#{username} said: #{raw}"] }
        end
      end

      if rows.blank?
        "Say: No topic found!"
      else
        # Truncate to keep the payload inside the prompt token budget.
        rows.join("\n")[0..2000]
      end
    end
  end
end
diff --git a/lib/modules/ai_bot/commands/tags_command.rb b/lib/modules/ai_bot/commands/tags_command.rb
new file mode 100644
index 00000000..5ab26c17
--- /dev/null
+++ b/lib/modules/ai_bot/commands/tags_command.rb
@@ -0,0 +1,38 @@
#frozen_string_literal: true

module DiscourseAi::AiBot::Commands
  class TagsCommand < Command
    class << self
      def name
        "tags"
      end

      def desc
        "!tags - will list the 100 most popular tags on the current discourse instance"
      end
    end

    # Label used when feeding this command's output back into the prompt.
    def result_name
      "results"
    end

    # Interpolation values for the localized command description on the post.
    def description_args
      { count: @last_count || 0 }
    end

    # Returns a CSV-style listing of the 100 most-used public tags,
    # one "name, count" row per tag, headed by "Name, Topic Count".
    def process(_args)
      tag_rows =
        Tag
          .where("public_topic_count > 0")
          .order(public_topic_count: :desc)
          .limit(100)
          .pluck(:name, :public_topic_count)

      @last_count = tag_rows.size

      tag_rows.each_with_object(+"Name, Topic Count\n") do |(tag_name, topic_count), listing|
        listing << "#{tag_name}, #{topic_count}\n"
      end
    end
  end
end
diff --git a/lib/modules/ai_bot/commands/time_command.rb b/lib/modules/ai_bot/commands/time_command.rb
new file mode 100644
index 00000000..aaa0a57c
--- /dev/null
+++ b/lib/modules/ai_bot/commands/time_command.rb
@@ -0,0 +1,38 @@
#frozen_string_literal: true

module DiscourseAi::AiBot::Commands
  class TimeCommand < Command
    class << self
      def name
        "time"
      end

      def desc
        "!time RUBY_COMPATIBLE_TIMEZONE - will generate the time in a timezone"
      end
    end

    # Label used when feeding this command's output back into the prompt.
    def result_name
      "time"
    end

    # Interpolation values for the localized command description on the post.
    def description_args
      { timezone: @last_timezone, time: @last_time }
    end

    # Returns the current time in +timezone+ as a string; falls back to the
    # server's local time when the zone name is not recognized.
    def process(timezone)
      zoned_time =
        begin
          Time.now.in_time_zone(timezone)
        rescue StandardError
          nil
        end
      zoned_time ||= Time.now

      @last_timezone = timezone
      @last_time = zoned_time.to_s

      @last_time
    end
  end
end
diff --git a/lib/modules/ai_bot/entry_point.rb b/lib/modules/ai_bot/entry_point.rb
index 82793d1a..02780de8 100644
--- a/lib/modules/ai_bot/entry_point.rb
+++ b/lib/modules/ai_bot/entry_point.rb
@@ -31,6 +31,14 @@ module DiscourseAi
require_relative "bot"
require_relative "anthropic_bot"
require_relative "open_ai_bot"
+ require_relative "commands/command"
+ require_relative "commands/search_command"
+ require_relative "commands/categories_command"
+ require_relative "commands/tags_command"
+ require_relative "commands/time_command"
+ require_relative "commands/summarize_command"
+ require_relative "commands/image_command"
+ require_relative "commands/google_command"
end
def inject_into(plugin)
@@ -43,7 +51,8 @@ module DiscourseAi
plugin.on(:post_created) do |post|
bot_ids = BOTS.map(&:first)
- if post.topic.private_message? && !bot_ids.include?(post.user_id)
+ if post.post_type == Post.types[:regular] && post.topic.private_message? &&
+ !bot_ids.include?(post.user_id)
if (SiteSetting.ai_bot_allowed_groups_map & post.user.group_ids).present?
bot_id = post.topic.topic_allowed_users.where(user_id: bot_ids).first&.user_id
diff --git a/lib/modules/ai_bot/open_ai_bot.rb b/lib/modules/ai_bot/open_ai_bot.rb
index 2d50310c..33c5080e 100644
--- a/lib/modules/ai_bot/open_ai_bot.rb
+++ b/lib/modules/ai_bot/open_ai_bot.rb
@@ -13,15 +13,38 @@ module DiscourseAi
end
def prompt_limit
- 3500
+ # note GPT counts both reply and request tokens in limits...
+ # also allow for an extra 500 or so spare tokens
+ if bot_user.id == DiscourseAi::AiBot::EntryPoint::GPT4_ID
+ 8192 - 3500
+ else
+ 4096 - 2000
+ end
+ end
+
+ def reply_params
+ max_tokens =
+ if bot_user.id == DiscourseAi::AiBot::EntryPoint::GPT4_ID
+ 3000
+ else
+ 1500
+ end
+
+ { temperature: 0.4, top_p: 0.9, max_tokens: max_tokens }
end
private
- def build_message(poster_username, content)
- role = poster_username == bot_user.username ? "system" : "user"
+ def build_message(poster_username, content, system: false)
+ is_bot = poster_username == bot_user.username
- { role: role, content: content }
+ if system
+ role = "system"
+ else
+ role = is_bot ? "assistant" : "user"
+ end
+
+ { role: role, content: is_bot ? content : "#{poster_username}: #{content}" }
end
def model_for
@@ -43,15 +66,9 @@ module DiscourseAi
).dig(:choices, 0, :message, :content)
end
- def submit_prompt_and_stream_reply(prompt, &blk)
- DiscourseAi::Inference::OpenAiCompletions.perform!(
- prompt,
- model_for,
- temperature: 0.4,
- top_p: 0.9,
- max_tokens: 3000,
- &blk
- )
+ def submit_prompt_and_stream_reply(prompt, prefer_low_cost: false, &blk)
+ model = prefer_low_cost ? "gpt-3.5-turbo" : model_for
+ DiscourseAi::Inference::OpenAiCompletions.perform!(prompt, model, **reply_params, &blk)
end
def tokenize(text)
diff --git a/spec/lib/modules/ai_bot/bot_spec.rb b/spec/lib/modules/ai_bot/bot_spec.rb
index 434274b2..dbea6d2a 100644
--- a/spec/lib/modules/ai_bot/bot_spec.rb
+++ b/spec/lib/modules/ai_bot/bot_spec.rb
@@ -3,20 +3,85 @@
require_relative "../../../support/openai_completions_inference_stubs"
RSpec.describe DiscourseAi::AiBot::Bot do
- describe "#update_pm_title" do
- fab!(:topic) { Fabricate(:topic) }
- fab!(:post) { Fabricate(:post, topic: topic) }
+ fab!(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
+ fab!(:bot) { described_class.as(bot_user) }
+ fab!(:user) { Fabricate(:user) }
+ fab!(:pm) do
+ Fabricate(
+ :private_message_topic,
+ title: "This is my special PM",
+ user: user,
+ topic_allowed_users: [
+ Fabricate.build(:topic_allowed_user, user: user),
+ Fabricate.build(:topic_allowed_user, user: bot_user),
+ ],
+ )
+ end
+ fab!(:first_post) { Fabricate(:post, topic: pm, user: user, raw: "This is a reply by the user") }
+ fab!(:second_post) do
+ Fabricate(:post, topic: pm, user: user, raw: "This is a second reply by the user")
+ end
+
+ describe "#system_prompt" do
+ it "includes relevant context in system prompt" do
+ bot.system_prompt_style!(:standard)
+
+ SiteSetting.title = "My Forum"
+ SiteSetting.site_description = "My Forum Description"
+
+ system_prompt = bot.system_prompt(second_post)
+
+ expect(system_prompt).to include(SiteSetting.title)
+ expect(system_prompt).to include(SiteSetting.site_description)
+
+ expect(system_prompt).to include(user.username)
+ end
+ end
+
+ describe "#reply_to" do
+ it "can respond to !search" do
+ bot.system_prompt_style!(:simple)
+
+ expected_response = "!search test search"
+
+ prompt = bot.bot_prompt_with_topic_context(second_post)
+
+ OpenAiCompletionsInferenceStubs.stub_streamed_response(
+ prompt,
+ [{ content: expected_response }],
+ req_opts: bot.reply_params.merge(stream: true),
+ )
+
+ prompt << { role: "assistant", content: "!search test search" }
+ prompt << { role: "user", content: "results: []" }
+
+ OpenAiCompletionsInferenceStubs.stub_streamed_response(
+ prompt,
+ [{ content: "We are done now" }],
+ req_opts: bot.reply_params.merge(stream: true),
+ )
+
+ bot.reply_to(second_post)
+
+ last = second_post.topic.posts.order("id desc").first
+ expect(last.post_custom_prompt.custom_prompt.to_s).to include("We are done now")
+
+ expect(last.raw).to include("