mirror of https://github.com/discourse/discourse-ai.git
FIX: Strip uploads from msg when searching for rag fragments (#1475)
commit 5ca7d5f256 (parent a94daa14e2)
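What the change does: a message's :content can be an Array that mixes text fragments with upload references, and several call sites (the dialects, the persona's RAG fragment search, and the question consolidator) previously read message[:content] directly. This commit promotes text_only to the class method DiscourseAi::Completions::Prompt.text_only and routes those call sites through it, so uploads are stripped before the text is embedded or tokenized. A minimal sketch of the helper, based on the method body added below (the upload_id value is illustrative only):

msg = { type: :user, content: ["in france?", { upload_id: 42 }] }
DiscourseAi::Completions::Prompt.text_only(msg)
# => "in france?"   (only String elements are kept and joined)

DiscourseAi::Completions::Prompt.text_only({ type: :user, content: "plain text" })
# => "plain text"   (non-Array content is returned unchanged)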
@@ -110,7 +110,7 @@ module DiscourseAi
         end
 
         def user_msg(msg)
-          content = prompt.text_only(msg)
+          content = DiscourseAi::Completions::Prompt.text_only(msg)
           user_message = { role: "USER", message: content }
           user_message[:message] = "#{msg[:id]}: #{content}" if msg[:id]
           user_message

@@ -156,7 +156,7 @@ module DiscourseAi
             end
           end
 
-          { role: "user", content: prompt.text_only(msg), images: images }
+          { role: "user", content: DiscourseAi::Completions::Prompt.text_only(msg), images: images }
         end
 
         def model_msg(msg)

@@ -69,7 +69,7 @@ module DiscourseAi
         end
 
         def user_msg(msg)
-          user_message = { role: "user", content: prompt.text_only(msg) }
+          user_message = { role: "user", content: DiscourseAi::Completions::Prompt.text_only(msg) }
 
           encoded_uploads = prompt.encoded_uploads(msg)
           if encoded_uploads.present?

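All three dialect hunks above make the same substitution: the per-instance prompt.text_only call becomes the class-level helper, so only the textual parts of a message end up in the text fields of the provider payload (uploads are still handled separately via prompt.encoded_uploads). A hedged sketch of what the first dialect's user_msg now builds for a mixed-content message; the :id and :upload_id values are illustrative, the role/message shape is taken from the first hunk:

msg = { id: 3, content: ["what is in this picture?", { upload_id: 7 }] }
content = DiscourseAi::Completions::Prompt.text_only(msg)   # => "what is in this picture?"
user_message = { role: "USER", message: content }
user_message[:message] = "#{msg[:id]}: #{content}" if msg[:id]
user_message   # => { role: "USER", message: "3: what is in this picture?" }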
@@ -8,6 +8,14 @@ module DiscourseAi
       attr_reader :messages, :tools, :system_message_text
       attr_accessor :topic_id, :post_id, :max_pixels, :tool_choice
 
+      def self.text_only(message)
+        if message[:content].is_a?(Array)
+          message[:content].map { |element| element if element.is_a?(String) }.compact.join
+        else
+          message[:content]
+        end
+      end
+
       def initialize(
         system_message_text = nil,
         messages: [],

@@ -146,14 +154,6 @@ module DiscourseAi
           []
         end
 
-      def text_only(message)
-        if message[:content].is_a?(Array)
-          message[:content].map { |element| element if element.is_a?(String) }.compact.join
-        else
-          message[:content]
-        end
-      end
-
       def encode_upload(upload_id)
         UploadEncoder.encode(upload_ids: [upload_id], max_pixels: max_pixels).first
       end

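The two hunks above move text_only from the instance section of DiscourseAi::Completions::Prompt to a class method, so callers that do not hold a Prompt instance (the persona and question consolidator changes below) can flatten a message without building a prompt first. The Array branch keeps only String elements and joins them with no separator; a small sketch of just that expression:

content = ["part one ", { upload_id: 1 }, "part two"]   # upload_id is illustrative
content.map { |element| element if element.is_a?(String) }.compact.join
# => "part one part two"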
@@ -365,7 +365,7 @@ module DiscourseAi
 
         # first response
         if latest_interactions.length == 1
-          consolidated_question = latest_interactions[0][:content]
+          consolidated_question = DiscourseAi::Completions::Prompt.text_only(latest_interactions[0])
         else
           consolidated_question =
             DiscourseAi::Personas::QuestionConsolidator.consolidate_question(

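In the hunk above, when the conversation has a single user message, the consolidated question is now the text of that message rather than its raw :content. The consolidated question is what gets embedded to search for RAG fragments (the spec below stubs EmbeddingsGenerationStubs.hugging_face_service with it), so stripping upload references keeps a plain string going into the embeddings call. A short sketch, with latest_interactions shaped like the context.messages used in the new spec:

latest_interactions = [{ type: :user, content: ["in france?", { upload_id: 7 }] }]
DiscourseAi::Completions::Prompt.text_only(latest_interactions[0])
# => "in france?"   (previously consolidated_question would have been the whole Array)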
@@ -33,7 +33,7 @@ module DiscourseAi
           row = +""
           row << ((message[:type] == :user) ? "user" : "model")
 
-          content = message[:content]
+          content = DiscourseAi::Completions::Prompt.text_only(message)
           current_tokens = @llm.tokenizer.tokenize(content).length
 
           allowed_tokens = @max_tokens - tokens

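The question consolidator budgets prior messages by token count; with a mixed-content message the old content = message[:content] handed the raw Array to the tokenizer. A hedged sketch of the difference (the upload_id is illustrative):

message = { type: :user, content: ["in france?", { upload_id: 7 }] }
message[:content]                                     # => ["in france?", { upload_id: 7 }]
DiscourseAi::Completions::Prompt.text_only(message)   # => "in france?"
# the String result is what @llm.tokenizer.tokenize(content).length (see the hunk above) now counts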
@@ -306,11 +306,7 @@ RSpec.describe DiscourseAi::Personas::Persona do
 
     fab!(:llm_model) { Fabricate(:fake_model) }
 
-    it "will run the question consolidator" do
-      context_embedding = vector_def.dimensions.times.map { rand(-1.0...1.0) }
-      EmbeddingsGenerationStubs.hugging_face_service(consolidated_question, context_embedding)
-
-      custom_ai_persona =
+    fab!(:custom_ai_persona) do
        Fabricate(
          :ai_persona,
          name: "custom",

@@ -318,9 +314,16 @@ RSpec.describe DiscourseAi::Personas::Persona do
          allowed_group_ids: [Group::AUTO_GROUPS[:trust_level_0]],
          question_consolidator_llm_id: llm_model.id,
        )
+    end
 
+    before do
+      context_embedding = vector_def.dimensions.times.map { rand(-1.0...1.0) }
+      EmbeddingsGenerationStubs.hugging_face_service(consolidated_question, context_embedding)
+
       UploadReference.ensure_exist!(target: custom_ai_persona, upload_ids: [upload.id])
+    end
 
+    it "will run the question consolidator" do
       custom_persona =
         DiscourseAi::Personas::Persona.find_by(id: custom_ai_persona.id, user: user).new
 

@@ -343,6 +346,36 @@ RSpec.describe DiscourseAi::Personas::Persona do
       expect(message).to include("the time is 1")
       expect(message).to include("in france?")
     end
+
+    context "when there are messages with uploads" do
+      let(:image100x100) { plugin_file_from_fixtures("100x100.jpg") }
+      let(:image_upload) do
+        UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id)
+      end
+
+      it "the question consolidator works" do
+        custom_persona =
+          DiscourseAi::Personas::Persona.find_by(id: custom_ai_persona.id, user: user).new
+
+        context.messages = [
+          { content: "Tell me the time", type: :user },
+          { content: "the time is 1", type: :model },
+          { content: ["in france?", { upload_id: image_upload.id }], type: :user },
+        ]
+
+        DiscourseAi::Completions::Endpoints::Fake.with_fake_content(consolidated_question) do
+          custom_persona.craft_prompt(context).messages.first[:content]
+        end
+
+        message =
+          DiscourseAi::Completions::Endpoints::Fake.last_call[:dialect].prompt.messages.last[
+            :content
+          ]
+        expect(message).to include("Tell me the time")
+        expect(message).to include("the time is 1")
+        expect(message).to include("in france?")
+      end
+    end
   end
 
   context "when a persona has RAG uploads" do