diff --git a/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-show.js b/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-show.js
index 7a9fa379..374cf7c7 100644
--- a/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-show.js
+++ b/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-show.js
@@ -4,7 +4,9 @@ export default DiscourseRoute.extend({
async model(params) {
const allLlms = this.modelFor("adminPlugins.show.discourse-ai-llms");
const id = parseInt(params.id, 10);
- return allLlms.findBy("id", id);
+ const record = allLlms.findBy("id", id);
+ record.provider_params = record.provider_params || {};
+ return record;
},
setupController(controller, model) {
diff --git a/app/controllers/discourse_ai/admin/ai_llms_controller.rb b/app/controllers/discourse_ai/admin/ai_llms_controller.rb
index 635a4a2f..ad6ff6b4 100644
--- a/app/controllers/discourse_ai/admin/ai_llms_controller.rb
+++ b/app/controllers/discourse_ai/admin/ai_llms_controller.rb
@@ -117,11 +117,21 @@ module DiscourseAi
new_url = params.dig(:ai_llm, :url)
permitted[:url] = new_url if permit_url && new_url
- extra_field_names = LlmModel.provider_params.dig(provider&.to_sym, :fields).to_a
- received_prov_params = params.dig(:ai_llm, :provider_params)
- permitted[:provider_params] = received_prov_params.slice(
- *extra_field_names,
- ).permit! if !extra_field_names.empty? && received_prov_params.present?
+ extra_field_names = LlmModel.provider_params.dig(provider&.to_sym)
+ if extra_field_names.present?
+ received_prov_params =
+ params.dig(:ai_llm, :provider_params)&.slice(*extra_field_names.keys)
+
+ if received_prov_params.present?
+ received_prov_params.each do |pname, value|
+ if extra_field_names[pname.to_sym] == :checkbox
+ received_prov_params[pname] = ActiveModel::Type::Boolean.new.cast(value)
+ end
+ end
+
+ permitted[:provider_params] = received_prov_params.permit!
+ end
+ end
permitted
end
diff --git a/app/models/llm_model.rb b/app/models/llm_model.rb
index 4219537b..c1c7ef7a 100644
--- a/app/models/llm_model.rb
+++ b/app/models/llm_model.rb
@@ -17,12 +17,20 @@ class LlmModel < ActiveRecord::Base
def self.provider_params
{
aws_bedrock: {
- url_editable: false,
- fields: %i[access_key_id region],
+ access_key_id: :text,
+ region: :text,
},
open_ai: {
- url_editable: true,
- fields: %i[organization],
+ organization: :text,
+ },
+ hugging_face: {
+ disable_system_prompt: :checkbox,
+ },
+ vllm: {
+ disable_system_prompt: :checkbox,
+ },
+ ollama: {
+ disable_system_prompt: :checkbox,
},
}
end
diff --git a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
index b5d25ba2..bb04ddd7 100644
--- a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
+++ b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
@@ -7,6 +7,7 @@ import { action, computed } from "@ember/object";
import { LinkTo } from "@ember/routing";
import { later } from "@ember/runloop";
import { inject as service } from "@ember/service";
+import { eq } from "truth-helpers";
import DButton from "discourse/components/d-button";
import DToggleSwitch from "discourse/components/d-toggle-switch";
import Avatar from "discourse/helpers/bound-avatar-template";
@@ -52,9 +53,9 @@ export default class AiLlmEditorForm extends Component {
return this.testRunning || this.testResult !== null;
}
+ @computed("args.model.provider")
get canEditURL() {
- // Explicitly false.
- return this.metaProviderParams.url_editable !== false;
+    return this.args.model.provider !== "aws_bedrock";
}
get modulesUsingModel() {
@@ -227,18 +228,24 @@ export default class AiLlmEditorForm extends Component {
%%LLM_RESPONSE%%
diff --git a/discourse_automation/llm_triage.rb b/discourse_automation/llm_triage.rb index 258b1636..b8b52fb4 100644 --- a/discourse_automation/llm_triage.rb +++ b/discourse_automation/llm_triage.rb @@ -9,17 +9,7 @@ if defined?(DiscourseAutomation) triggerables %i[post_created_edited] - field :system_prompt, - component: :message, - required: true, - validator: ->(input) do - if !input.include?("%%POST%%") - I18n.t( - "discourse_automation.scriptables.llm_triage.system_prompt_missing_post_placeholder", - ) - end - end, - accepts_placeholders: true + field :system_prompt, component: :message, required: false field :search_for_text, component: :text, required: true field :model, component: :choices, diff --git a/lib/automation/llm_triage.rb b/lib/automation/llm_triage.rb index a864695f..073a45d2 100644 --- a/lib/automation/llm_triage.rb +++ b/lib/automation/llm_triage.rb @@ -21,15 +21,9 @@ module DiscourseAi raise ArgumentError, "llm_triage: no action specified!" end - post_template = +"" - post_template << "title: #{post.topic.title}\n" - post_template << "#{post.raw}" - - filled_system_prompt = system_prompt.sub("%%POST%%", post_template) - - if filled_system_prompt == system_prompt - raise ArgumentError, "llm_triage: system_prompt does not contain %%POST%% placeholder" - end + s_prompt = system_prompt.to_s.sub("%%POST%%", "") # Backwards-compat. We no longer sub this. 
+ prompt = DiscourseAi::Completions::Prompt.new(s_prompt) + prompt.push(type: :user, content: "title: #{post.topic.title}\n#{post.raw}") result = nil @@ -37,7 +31,7 @@ module DiscourseAi result = llm.generate( - filled_system_prompt, + prompt, temperature: 0, max_tokens: 700, # ~500 words user: Discourse.system_user, diff --git a/lib/completions/dialects/open_ai_compatible.rb b/lib/completions/dialects/open_ai_compatible.rb index 33af1a14..0ed2a1d8 100644 --- a/lib/completions/dialects/open_ai_compatible.rb +++ b/lib/completions/dialects/open_ai_compatible.rb @@ -24,6 +24,18 @@ module DiscourseAi 32_000 end + def translate + translated = super + + return translated unless llm_model.lookup_custom_param("disable_system_prompt") + + system_and_user_msgs = translated.shift(2) + user_msg = system_and_user_msgs.last + user_msg[:content] = [system_and_user_msgs.first[:content], user_msg[:content]].join("\n") + + translated.unshift(user_msg) + end + private def system_msg(msg) diff --git a/spec/lib/completions/dialects/open_ai_compatible_spec.rb b/spec/lib/completions/dialects/open_ai_compatible_spec.rb new file mode 100644 index 00000000..7da85afa --- /dev/null +++ b/spec/lib/completions/dialects/open_ai_compatible_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +RSpec.describe DiscourseAi::Completions::Dialects::OpenAiCompatible do + context "when system prompts are disabled" do + it "merges the system prompt into the first message" do + system_msg = "This is a system message" + user_msg = "user message" + prompt = + DiscourseAi::Completions::Prompt.new( + system_msg, + messages: [{ type: :user, content: user_msg }], + ) + + model = Fabricate(:vllm_model, provider_params: { disable_system_prompt: true }) + + translated_messages = described_class.new(prompt, model).translate + + expect(translated_messages.length).to eq(1) + expect(translated_messages).to contain_exactly( + { role: "user", content: [system_msg, user_msg].join("\n") }, + ) + end + end + + context 
"when system prompts are enabled" do + it "includes system and user messages separately" do + system_msg = "This is a system message" + user_msg = "user message" + prompt = + DiscourseAi::Completions::Prompt.new( + system_msg, + messages: [{ type: :user, content: user_msg }], + ) + + model = Fabricate(:vllm_model, provider_params: { disable_system_prompt: false }) + + translated_messages = described_class.new(prompt, model).translate + + expect(translated_messages.length).to eq(2) + expect(translated_messages).to contain_exactly( + { role: "system", content: system_msg }, + { role: "user", content: user_msg }, + ) + end + end +end diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index f848570d..6abf4433 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -136,6 +136,24 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do expect(created_model.lookup_custom_param("region")).to eq("us-east-1") expect(created_model.lookup_custom_param("access_key_id")).to eq("test") end + + it "supports boolean values" do + post "/admin/plugins/discourse-ai/ai-llms.json", + params: { + ai_llm: + valid_attrs.merge( + provider: "vllm", + provider_params: { + disable_system_prompt: true, + }, + ), + } + + created_model = LlmModel.last + + expect(response.status).to eq(201) + expect(created_model.lookup_custom_param("disable_system_prompt")).to eq(true) + end end end