FEATURE: RAG search within tools (#802)

This gives custom tools access to uploads and to sophisticated embedding-based search.

It introduces:

 - A front end for listing and uploading files (shared with personas)
 - A backend implementation of the index.search function within custom tools

Custom tools can now search through uploaded files:

function invoke(params) {
   return index.search(params.query)
}

This means that RAG implementers can now preload tools with knowledge and retain fine-grained control over the search.
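
For example, a tool preloaded with uploaded documents might look roughly like this (a sketch only: the details() text and the way fragments are joined are illustrative, not part of this commit):

function invoke(params) {
   // results come back as an array of { fragment, metadata }
   const fragments = index.search(params.query);
   return fragments.map((f) => f.fragment).join("\n\n");
}

function details() {
   return "Answers questions using the documents uploaded to this tool.";
}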

The search function supports the following options, illustrated in the example below:

 - specifying the maximum number of results
 - specifying a subset of files to search (from the uploads)
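
Both are passed via an optional second argument. A minimal sketch (the filename is hypothetical; limit is capped at 200 fragments):

function invoke(params) {
   // search only one of the uploaded files and return at most 5 fragments
   return index.search(params.query, {
      filenames: ["handbook.md"], // hypothetical upload name
      limit: 5,
   });
}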

Also:

 - Improved documentation for tools (when creating a tool, a preamble explains all of the available functionality)
 - Uploads were a bit finicky; fixed an edge case where the UI would not show them as updated

Sam 2024-09-30 16:27:50 +09:00 committed by GitHub
parent 18ecc843e5
commit 5cbc9190eb
22 changed files with 647 additions and 285 deletions

View File

@ -5,8 +5,7 @@ module DiscourseAi
class AiPersonasController < ::Admin::AdminController class AiPersonasController < ::Admin::AdminController
requires_plugin ::DiscourseAi::PLUGIN_NAME requires_plugin ::DiscourseAi::PLUGIN_NAME
before_action :find_ai_persona, before_action :find_ai_persona, only: %i[show update destroy create_user]
only: %i[show update destroy create_user indexing_status_check]
def index def index
ai_personas = ai_personas =
@ -75,37 +74,6 @@ module DiscourseAi
end end
end end
def upload_file
file = params[:file] || params[:files].first
if !SiteSetting.ai_embeddings_enabled?
raise Discourse::InvalidAccess.new("Embeddings not enabled")
end
validate_extension!(file.original_filename)
validate_file_size!(file.tempfile.size)
hijack do
upload =
UploadCreator.new(
file.tempfile,
file.original_filename,
type: "discourse_ai_rag_upload",
skip_validations: true,
).create_for(current_user.id)
if upload.persisted?
render json: UploadSerializer.new(upload)
else
render json: failed_json.merge(errors: upload.errors.full_messages), status: 422
end
end
end
def indexing_status_check
render json: RagDocumentFragment.indexing_status(@ai_persona, @ai_persona.uploads)
end
private private
def find_ai_persona def find_ai_persona
@ -163,31 +131,6 @@ module DiscourseAi
end end
end end
end end
def validate_extension!(filename)
extension = File.extname(filename)[1..-1] || ""
authorized_extensions = %w[txt md]
if !authorized_extensions.include?(extension)
raise Discourse::InvalidParameters.new(
I18n.t(
"upload.unauthorized",
authorized_extensions: authorized_extensions.join(" "),
),
)
end
end
def validate_file_size!(filesize)
max_size_bytes = 20.megabytes
if filesize > max_size_bytes
raise Discourse::InvalidParameters.new(
I18n.t(
"upload.attachments.too_large_humanized",
max_size: ActiveSupport::NumberHelper.number_to_human_size(max_size_bytes),
),
)
end
end
end end
end end
end end

View File

@ -17,10 +17,11 @@ module DiscourseAi
end end
def create def create
ai_tool = AiTool.new(ai_tool_params) ai_tool = AiTool.new(ai_tool_params.except(:rag_uploads))
ai_tool.created_by_id = current_user.id ai_tool.created_by_id = current_user.id
if ai_tool.save if ai_tool.save
RagDocumentFragment.link_target_and_uploads(ai_tool, attached_upload_ids)
render_serialized(ai_tool, AiCustomToolSerializer, status: :created) render_serialized(ai_tool, AiCustomToolSerializer, status: :created)
else else
render_json_error ai_tool render_json_error ai_tool
@ -28,7 +29,8 @@ module DiscourseAi
end end
def update def update
if @ai_tool.update(ai_tool_params) if @ai_tool.update(ai_tool_params.except(:rag_uploads))
RagDocumentFragment.update_target_uploads(@ai_tool, attached_upload_ids)
render_serialized(@ai_tool, AiCustomToolSerializer) render_serialized(@ai_tool, AiCustomToolSerializer)
else else
render_json_error @ai_tool render_json_error @ai_tool
@ -71,6 +73,10 @@ module DiscourseAi
private private
def attached_upload_ids
ai_tool_params[:rag_uploads].to_a.map { |h| h[:id] }
end
def find_ai_tool def find_ai_tool
@ai_tool = AiTool.find(params[:id]) @ai_tool = AiTool.find(params[:id])
end end
@ -81,6 +87,9 @@ module DiscourseAi
:description, :description,
:script, :script,
:summary, :summary,
:rag_chunk_tokens,
:rag_chunk_overlap_tokens,
rag_uploads: [:id],
parameters: [:name, :type, :description, :required, enum: []], parameters: [:name, :type, :description, :required, enum: []],
) )
end end

View File

@ -0,0 +1,75 @@
# frozen_string_literal: true
module DiscourseAi
module Admin
class RagDocumentFragmentsController < ::Admin::AdminController
requires_plugin ::DiscourseAi::PLUGIN_NAME
def indexing_status_check
if params[:target_type] == "AiPersona"
@target = AiPersona.find(params[:target_id])
elsif params[:target_type] == "AiTool"
@target = AiTool.find(params[:target_id])
else
raise Discourse::InvalidParameters.new("Invalid target type")
end
render json: RagDocumentFragment.indexing_status(@target, @target.uploads)
end
def upload_file
file = params[:file] || params[:files].first
if !SiteSetting.ai_embeddings_enabled?
raise Discourse::InvalidAccess.new("Embeddings not enabled")
end
validate_extension!(file.original_filename)
validate_file_size!(file.tempfile.size)
hijack do
upload =
UploadCreator.new(
file.tempfile,
file.original_filename,
type: "discourse_ai_rag_upload",
skip_validations: true,
).create_for(current_user.id)
if upload.persisted?
render json: UploadSerializer.new(upload)
else
render json: failed_json.merge(errors: upload.errors.full_messages), status: 422
end
end
end
private
def validate_extension!(filename)
extension = File.extname(filename)[1..-1] || ""
authorized_extensions = %w[txt md]
if !authorized_extensions.include?(extension)
raise Discourse::InvalidParameters.new(
I18n.t(
"upload.unauthorized",
authorized_extensions: authorized_extensions.join(" "),
),
)
end
end
def validate_file_size!(filesize)
max_size_bytes = 20.megabytes
if filesize > max_size_bytes
raise Discourse::InvalidParameters.new(
I18n.t(
"upload.attachments.too_large_humanized",
max_size: ActiveSupport::NumberHelper.number_to_human_size(max_size_bytes),
),
)
end
end
end
end
end

View File

@ -126,7 +126,9 @@ module ::Jobs
while overlap_token_ids.present? while overlap_token_ids.present?
begin begin
overlap = tokenizer.decode(overlap_token_ids) + split_char padding = split_char
padding = " " if padding.empty?
overlap = tokenizer.decode(overlap_token_ids) + padding
break if overlap.encoding == Encoding::UTF_8 break if overlap.encoding == Encoding::UTF_8
rescue StandardError rescue StandardError
# it is possible that we truncated mid char # it is possible that we truncated mid char
@ -135,7 +137,7 @@ module ::Jobs
end end
# remove first word it is probably truncated # remove first word it is probably truncated
overlap = overlap.split(" ", 2).last overlap = overlap.split(/\s/, 2).last.to_s.lstrip
end end
end end

View File

@ -7,6 +7,10 @@ class AiTool < ActiveRecord::Base
validates :script, presence: true, length: { maximum: 100_000 } validates :script, presence: true, length: { maximum: 100_000 }
validates :created_by_id, presence: true validates :created_by_id, presence: true
belongs_to :created_by, class_name: "User" belongs_to :created_by, class_name: "User"
has_many :rag_document_fragments, dependent: :destroy, as: :target
has_many :upload_references, as: :target, dependent: :destroy
has_many :uploads, through: :upload_references
before_update :regenerate_rag_fragments
def signature def signature
{ name: name, description: description, parameters: parameters.map(&:symbolize_keys) } { name: name, description: description, parameters: parameters.map(&:symbolize_keys) }
@ -28,6 +32,82 @@ class AiTool < ActiveRecord::Base
AiPersona.persona_cache.flush! AiPersona.persona_cache.flush!
end end
def regenerate_rag_fragments
if rag_chunk_tokens_changed? || rag_chunk_overlap_tokens_changed?
RagDocumentFragment.where(target: self).delete_all
end
end
def self.preamble
<<~JS
/**
* Tool API Quick Reference
*
* Entry Functions
*
* invoke(parameters): Main function. Receives parameters (Object). Must return a JSON-serializable value.
* Example:
* function invoke(parameters) { return "result"; }
*
* details(): Optional. Returns a string describing the tool.
* Example:
* function details() { return "Tool description."; }
*
* Provided Objects
*
* 1. http
* http.get(url, options?): Performs an HTTP GET request.
* Parameters:
* url (string): The request URL.
* options (Object, optional):
* headers (Object): Request headers.
* Returns:
* { status: number, body: string }
*
* http.post(url, options?): Performs an HTTP POST request.
* Parameters:
* url (string): The request URL.
* options (Object, optional):
* headers (Object): Request headers.
* body (string): Request body.
* Returns:
* { status: number, body: string }
*
* Note: Max 20 HTTP requests per execution.
*
* 2. llm
* llm.truncate(text, length): Truncates text to a specified token length.
* Parameters:
* text (string): Text to truncate.
* length (number): Max tokens.
* Returns:
* Truncated string.
*
* 3. index
* index.search(query, options?): Searches indexed documents.
* Parameters:
* query (string): Search query.
* options (Object, optional):
* filenames (Array): Limit search to specific files.
* limit (number): Max fragments (up to 200).
* Returns:
* Array of { fragment: string, metadata: string }
*
* Constraints
*
* Execution Time: 2000ms
* Memory: 10MB
* HTTP Requests: 20 per execution
* Exceeding limits will result in errors or termination.
*
* Security
*
* Sandboxed Environment: No access to system or global objects.
* No File System Access: Cannot read or write files.
*/
JS
end
def self.presets def self.presets
[ [
{ {
@ -38,6 +118,7 @@ class AiTool < ActiveRecord::Base
{ name: "url", type: "string", required: true, description: "The URL to browse" }, { name: "url", type: "string", required: true, description: "The URL to browse" },
], ],
script: <<~SCRIPT, script: <<~SCRIPT,
#{preamble}
let url; let url;
function invoke(p) { function invoke(p) {
url = p.url; url = p.url;
@ -70,6 +151,7 @@ class AiTool < ActiveRecord::Base
{ name: "amount", type: "number", description: "Amount to convert eg: 123.45" }, { name: "amount", type: "number", description: "Amount to convert eg: 123.45" },
], ],
script: <<~SCRIPT, script: <<~SCRIPT,
#{preamble}
// note: this script uses the open.er-api.com service, it is only updated // note: this script uses the open.er-api.com service, it is only updated
// once every 24 hours, for more up to date rates see: https://www.exchangerate-api.com // once every 24 hours, for more up to date rates see: https://www.exchangerate-api.com
function invoke(params) { function invoke(params) {
@ -118,6 +200,7 @@ class AiTool < ActiveRecord::Base
}, },
], ],
script: <<~SCRIPT, script: <<~SCRIPT,
#{preamble}
function invoke(params) { function invoke(params) {
const apiKey = 'YOUR_ALPHAVANTAGE_API_KEY'; // Replace with your actual API key const apiKey = 'YOUR_ALPHAVANTAGE_API_KEY'; // Replace with your actual API key
const url = `https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=${params.symbol}&apikey=${apiKey}`; const url = `https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=${params.symbol}&apikey=${apiKey}`;
@ -154,6 +237,7 @@ class AiTool < ActiveRecord::Base
summary: "Get real-time stock quotes using AlphaVantage API", summary: "Get real-time stock quotes using AlphaVantage API",
}, },
{ preset_id: "empty_tool", script: <<~SCRIPT }, { preset_id: "empty_tool", script: <<~SCRIPT },
#{preamble}
function invoke(params) { function invoke(params) {
// logic here // logic here
return params; return params;
@ -173,14 +257,16 @@ end
# #
# Table name: ai_tools # Table name: ai_tools
# #
# id :bigint not null, primary key # id :bigint not null, primary key
# name :string not null # name :string not null
# description :string not null # description :string not null
# summary :string not null # summary :string not null
# parameters :jsonb not null # parameters :jsonb not null
# script :text not null # script :text not null
# created_by_id :integer not null # created_by_id :integer not null
# enabled :boolean default(TRUE), not null # enabled :boolean default(TRUE), not null
# created_at :datetime not null # created_at :datetime not null
# updated_at :datetime not null # updated_at :datetime not null
# rag_chunk_tokens :integer default(374), not null
# rag_chunk_overlap_tokens :integer default(10), not null
# #

View File

@ -72,11 +72,7 @@ class RagDocumentFragment < ActiveRecord::Base
end end
def publish_status(upload, status) def publish_status(upload, status)
MessageBus.publish( MessageBus.publish("/discourse-ai/rag/#{upload.id}", status, user_ids: [upload.user_id])
"/discourse-ai/ai-persona-rag/#{upload.id}",
status,
user_ids: [upload.user_id],
)
end end
end end
end end

View File

@ -7,9 +7,17 @@ class AiCustomToolSerializer < ApplicationSerializer
:summary, :summary,
:parameters, :parameters,
:script, :script,
:rag_chunk_tokens,
:rag_chunk_overlap_tokens,
:created_by_id, :created_by_id,
:created_at, :created_at,
:updated_at :updated_at
self.root = "ai_tool" self.root = "ai_tool"
has_many :rag_uploads, serializer: UploadSerializer, embed: :object
def rag_uploads
object.uploads
end
end end

View File

@ -8,6 +8,9 @@ const CREATE_ATTRIBUTES = [
"parameters", "parameters",
"script", "script",
"summary", "summary",
"rag_uploads",
"rag_chunk_tokens",
"rag_chunk_overlap_tokens",
"enabled", "enabled",
]; ];

View File

@ -23,7 +23,8 @@ import DTooltip from "float-kit/components/d-tooltip";
import AiLlmSelector from "./ai-llm-selector"; import AiLlmSelector from "./ai-llm-selector";
import AiPersonaToolOptions from "./ai-persona-tool-options"; import AiPersonaToolOptions from "./ai-persona-tool-options";
import AiToolSelector from "./ai-tool-selector"; import AiToolSelector from "./ai-tool-selector";
import PersonaRagUploader from "./persona-rag-uploader"; import RagOptions from "./rag-options";
import RagUploader from "./rag-uploader";
export default class PersonaEditor extends Component { export default class PersonaEditor extends Component {
@service router; @service router;
@ -38,7 +39,6 @@ export default class PersonaEditor extends Component {
@tracked showDelete = false; @tracked showDelete = false;
@tracked maxPixelsValue = null; @tracked maxPixelsValue = null;
@tracked ragIndexingStatuses = null; @tracked ragIndexingStatuses = null;
@tracked showIndexingOptions = false;
get chatPluginEnabled() { get chatPluginEnabled() {
return this.siteSettings.chat_enabled; return this.siteSettings.chat_enabled;
@ -53,13 +53,6 @@ export default class PersonaEditor extends Component {
); );
} }
@action
toggleIndexingOptions(event) {
this.showIndexingOptions = !this.showIndexingOptions;
event.preventDefault();
event.stopPropagation();
}
findClosestPixelValue(pixels) { findClosestPixelValue(pixels) {
let value = "high"; let value = "high";
this.maxPixelValues.forEach((info) => { this.maxPixelValues.forEach((info) => {
@ -81,12 +74,6 @@ export default class PersonaEditor extends Component {
]; ];
} }
get indexingOptionsText() {
return this.showIndexingOptions
? I18n.t("discourse_ai.ai_persona.hide_indexing_options")
: I18n.t("discourse_ai.ai_persona.show_indexing_options");
}
@action @action
async updateAllGroups() { async updateAllGroups() {
this.allGroups = await Group.findAll(); this.allGroups = await Group.findAll();
@ -487,54 +474,13 @@ export default class PersonaEditor extends Component {
{{/if}} {{/if}}
{{#if this.siteSettings.ai_embeddings_enabled}} {{#if this.siteSettings.ai_embeddings_enabled}}
<div class="control-group"> <div class="control-group">
<PersonaRagUploader <RagUploader
@persona={{this.editingModel}} @target={{this.editingModel}}
@updateUploads={{this.updateUploads}} @updateUploads={{this.updateUploads}}
@onRemove={{this.removeUpload}} @onRemove={{this.removeUpload}}
/> />
{{#if this.editingModel.rag_uploads}}
<a
href="#"
class="ai-persona-editor__indexing-options"
{{on "click" this.toggleIndexingOptions}}
>{{this.indexingOptionsText}}</a>
{{/if}}
</div> </div>
{{#if this.showIndexingOptions}} <RagOptions @model={{this.editingModel}}>
<div class="control-group">
<label>{{I18n.t "discourse_ai.ai_persona.rag_chunk_tokens"}}</label>
<Input
@type="number"
step="any"
lang="en"
class="ai-persona-editor__rag_chunk_tokens"
@value={{this.editingModel.rag_chunk_tokens}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t
"discourse_ai.ai_persona.rag_chunk_tokens_help"
}}
/>
</div>
<div class="control-group">
<label>{{I18n.t
"discourse_ai.ai_persona.rag_chunk_overlap_tokens"
}}</label>
<Input
@type="number"
step="any"
lang="en"
class="ai-persona-editor__rag_chunk_overlap_tokens"
@value={{this.editingModel.rag_chunk_overlap_tokens}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t
"discourse_ai.ai_persona.rag_chunk_overlap_tokens_help"
}}
/>
</div>
<div class="control-group"> <div class="control-group">
<label>{{I18n.t <label>{{I18n.t
"discourse_ai.ai_persona.rag_conversation_chunks" "discourse_ai.ai_persona.rag_conversation_chunks"
@ -553,7 +499,6 @@ export default class PersonaEditor extends Component {
}} }}
/> />
</div> </div>
<div class="control-group"> <div class="control-group">
<label>{{I18n.t <label>{{I18n.t
"discourse_ai.ai_persona.question_consolidator_llm" "discourse_ai.ai_persona.question_consolidator_llm"
@ -571,7 +516,7 @@ export default class PersonaEditor extends Component {
}} }}
/> />
</div> </div>
{{/if}} </RagOptions>
{{/if}} {{/if}}
<div class="control-group ai-persona-editor__action_panel"> <div class="control-group ai-persona-editor__action_panel">
<DButton <DButton

View File

@ -16,6 +16,8 @@ import I18n from "discourse-i18n";
import ComboBox from "select-kit/components/combo-box"; import ComboBox from "select-kit/components/combo-box";
import AiToolParameterEditor from "./ai-tool-parameter-editor"; import AiToolParameterEditor from "./ai-tool-parameter-editor";
import AiToolTestModal from "./modal/ai-tool-test-modal"; import AiToolTestModal from "./modal/ai-tool-test-modal";
import RagOptions from "./rag-options";
import RagUploader from "./rag-uploader";
const ACE_EDITOR_MODE = "javascript"; const ACE_EDITOR_MODE = "javascript";
const ACE_EDITOR_THEME = "chrome"; const ACE_EDITOR_THEME = "chrome";
@ -26,6 +28,7 @@ export default class AiToolEditor extends Component {
@service modal; @service modal;
@service toasts; @service toasts;
@service store; @service store;
@service siteSettings;
@tracked isSaving = false; @tracked isSaving = false;
@tracked editingModel = null; @tracked editingModel = null;
@ -60,6 +63,19 @@ export default class AiToolEditor extends Component {
this.showDelete = false; this.showDelete = false;
} }
@action
updateUploads(uploads) {
this.editingModel.rag_uploads = uploads;
}
@action
removeUpload(upload) {
this.editingModel.rag_uploads.removeObject(upload);
if (!this.args.model.isNew) {
this.save();
}
}
@action @action
async save() { async save() {
this.isSaving = true; this.isSaving = true;
@ -70,7 +86,10 @@ export default class AiToolEditor extends Component {
"description", "description",
"parameters", "parameters",
"script", "script",
"summary" "summary",
"rag_uploads",
"rag_chunk_tokens",
"rag_chunk_overlap_tokens"
); );
await this.args.model.save(data); await this.args.model.save(data);
@ -201,6 +220,17 @@ export default class AiToolEditor extends Component {
/> />
</div> </div>
{{#if this.siteSettings.ai_embeddings_enabled}}
<div class="control-group">
<RagUploader
@target={{this.editingModel}}
@updateUploads={{this.updateUploads}}
@onRemove={{this.removeUpload}}
/>
</div>
<RagOptions @model={{this.editingModel}} />
{{/if}}
<div class="control-group ai-tool-editor__action_panel"> <div class="control-group ai-tool-editor__action_panel">
<DButton <DButton
@action={{this.openTestModal}} @action={{this.openTestModal}}

View File

@ -0,0 +1,70 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { Input } from "@ember/component";
import { on } from "@ember/modifier";
import { action } from "@ember/object";
import DTooltip from "discourse/components/d-tooltip";
import I18n from "discourse-i18n";
export default class RagOptions extends Component {
@tracked showIndexingOptions = false;
@action
toggleIndexingOptions(event) {
this.showIndexingOptions = !this.showIndexingOptions;
event.preventDefault();
event.stopPropagation();
}
get indexingOptionsText() {
return this.showIndexingOptions
? I18n.t("discourse_ai.rag.options.hide_indexing_options")
: I18n.t("discourse_ai.rag.options.show_indexing_options");
}
<template>
{{#if @model.rag_uploads}}
<a
href="#"
class="rag-options__indexing-options"
{{on "click" this.toggleIndexingOptions}}
>{{this.indexingOptionsText}}</a>
{{/if}}
{{#if this.showIndexingOptions}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.rag.options.rag_chunk_tokens"}}</label>
<Input
@type="number"
step="any"
lang="en"
class="rag-options__rag_chunk_tokens"
@value={{@model.rag_chunk_tokens}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.rag.options.rag_chunk_tokens_help"}}
/>
</div>
<div class="control-group">
<label>{{I18n.t
"discourse_ai.rag.options.rag_chunk_overlap_tokens"
}}</label>
<Input
@type="number"
step="any"
lang="en"
class="rag-options__rag_chunk_overlap_tokens"
@value={{@model.rag_chunk_overlap_tokens}}
/>
<DTooltip
@icon="question-circle"
@content={{I18n.t
"discourse_ai.rag.options.rag_chunk_overlap_tokens_help"
}}
/>
</div>
{{yield}}
{{/if}}
</template>
}

View File

@ -9,20 +9,17 @@ import I18n from "discourse-i18n";
export default class RagUploadProgress extends Component { export default class RagUploadProgress extends Component {
@service messageBus; @service messageBus;
@tracked updatedProgress = null; @tracked updatedProgress = null;
willDestroy() { willDestroy() {
super.willDestroy(...arguments); super.willDestroy(...arguments);
this.messageBus.unsubscribe( this.messageBus.unsubscribe(`/discourse-ai/rag/${this.args.upload.id}`);
`/discourse-ai/ai-persona-rag/${this.args.upload.id}`
);
} }
@action @action
trackProgress() { trackProgress() {
this.messageBus.subscribe( this.messageBus.subscribe(
`/discourse-ai/ai-persona-rag/${this.args.upload.id}`, `/discourse-ai/rag/${this.args.upload.id}`,
this.onIndexingUpdate this.onIndexingUpdate
); );
} }
@ -32,8 +29,9 @@ export default class RagUploadProgress extends Component {
// Order not guaranteed. Discard old updates. // Order not guaranteed. Discard old updates.
if ( if (
!this.updatedProgress || !this.updatedProgress ||
data.total === 0 || this.updatedProgress.left === 0 ||
this.updatedProgress.left > data.left this.updatedProgress.left > data.left ||
data.total === data.indexed
) { ) {
this.updatedProgress = data; this.updatedProgress = data;
} }
@ -64,26 +62,23 @@ export default class RagUploadProgress extends Component {
} }
<template> <template>
<td <td class="rag-uploader__upload-status" {{didInsert this.trackProgress}}>
class="persona-rag-uploader__upload-status"
{{didInsert this.trackProgress}}
>
{{#if this.progress}} {{#if this.progress}}
{{#if this.fullyIndexed}} {{#if this.fullyIndexed}}
<span class="indexed"> <span class="indexed">
{{icon "check"}} {{icon "check"}}
{{I18n.t "discourse_ai.ai_persona.uploads.indexed"}} {{I18n.t "discourse_ai.rag.uploads.indexed"}}
</span> </span>
{{else}} {{else}}
<span class="indexing"> <span class="indexing">
{{icon "robot"}} {{icon "robot"}}
{{I18n.t "discourse_ai.ai_persona.uploads.indexing"}} {{I18n.t "discourse_ai.rag.uploads.indexing"}}
{{this.calculateProgress}}% {{this.calculateProgress}}%
</span> </span>
{{/if}} {{/if}}
{{else}} {{else}}
<span class="uploaded">{{I18n.t <span class="uploaded">{{I18n.t
"discourse_ai.ai_persona.uploads.uploaded" "discourse_ai.rag.uploads.uploaded"
}}</span> }}</span>
{{/if}} {{/if}}
</td> </td>

View File

@ -12,18 +12,16 @@ import discourseDebounce from "discourse-common/lib/debounce";
import I18n from "discourse-i18n"; import I18n from "discourse-i18n";
import RagUploadProgress from "./rag-upload-progress"; import RagUploadProgress from "./rag-upload-progress";
export default class PersonaRagUploader extends Component.extend( export default class RagUploader extends Component.extend(UppyUploadMixin) {
UppyUploadMixin
) {
@service appEvents; @service appEvents;
@tracked term = null; @tracked term = null;
@tracked filteredUploads = null; @tracked filteredUploads = null;
@tracked ragIndexingStatuses = null; @tracked ragIndexingStatuses = null;
@tracked ragUploads = null; @tracked ragUploads = null;
id = "discourse-ai-persona-rag-uploader"; id = "discourse-ai-rag-uploader";
maxFiles = 20; maxFiles = 20;
uploadUrl = "/admin/plugins/discourse-ai/ai-personas/files/upload"; uploadUrl = "/admin/plugins/discourse-ai/rag-document-fragments/files/upload";
preventDirectS3Uploads = true; preventDirectS3Uploads = true;
didReceiveAttrs() { didReceiveAttrs() {
@ -33,12 +31,14 @@ export default class PersonaRagUploader extends Component.extend(
this._uppyInstance?.cancelAll(); this._uppyInstance?.cancelAll();
} }
this.ragUploads = this.persona?.rag_uploads || []; this.ragUploads = this.target?.rag_uploads || [];
this.filteredUploads = this.ragUploads; this.filteredUploads = this.ragUploads;
if (this.ragUploads?.length && this.persona?.id) { const targetName = this.target?.constructor?.name;
if (this.ragUploads?.length && this.target?.id) {
ajax( ajax(
`/admin/plugins/discourse-ai/ai-personas/${this.persona.id}/files/status.json` `/admin/plugins/discourse-ai/rag-document-fragments/files/status.json?target_type=${targetName}&target_id=${this.target.id}`
).then((statuses) => { ).then((statuses) => {
this.set("ragIndexingStatuses", statuses); this.set("ragIndexingStatuses", statuses);
}); });
@ -47,7 +47,7 @@ export default class PersonaRagUploader extends Component.extend(
this.appEvents.on( this.appEvents.on(
`upload-mixin:${this.id}:all-uploads-complete`, `upload-mixin:${this.id}:all-uploads-complete`,
this, this,
"_updatePersonaWithUploads" "_updateTargetWithUploads"
); );
} }
@ -56,18 +56,18 @@ export default class PersonaRagUploader extends Component.extend(
this.appEvents.off( this.appEvents.off(
`upload-mixin:${this.id}:all-uploads-complete`, `upload-mixin:${this.id}:all-uploads-complete`,
this, this,
"_updatePersonaWithUploads" "_updateTargetWithUploads"
); );
} }
_updatePersonaWithUploads() { _updateTargetWithUploads() {
this.updateUploads(this.ragUploads); this.updateUploads(this.ragUploads);
} }
uploadDone(uploadedFile) { uploadDone(uploadedFile) {
const newUpload = uploadedFile.upload; const newUpload = uploadedFile.upload;
newUpload.status = "uploaded"; newUpload.status = "uploaded";
newUpload.statusText = I18n.t("discourse_ai.ai_persona.uploads.uploaded"); newUpload.statusText = I18n.t("discourse_ai.rag.uploads.uploaded");
this.ragUploads.pushObject(newUpload); this.ragUploads.pushObject(newUpload);
this.debouncedSearch(); this.debouncedSearch();
} }
@ -112,20 +112,17 @@ export default class PersonaRagUploader extends Component.extend(
} }
<template> <template>
<div class="persona-rag-uploader"> <div class="rag-uploader">
<h3>{{I18n.t "discourse_ai.ai_persona.uploads.title"}}</h3> <h3>{{I18n.t "discourse_ai.rag.uploads.title"}}</h3>
<p>{{I18n.t "discourse_ai.ai_persona.uploads.description"}}</p> <p>{{I18n.t "discourse_ai.rag.uploads.description"}}</p>
{{#if this.ragUploads}} {{#if this.ragUploads}}
<div class="persona-rag-uploader__search-input-container"> <div class="rag-uploader__search-input-container">
<div class="persona-rag-uploader__search-input"> <div class="rag-uploader__search-input">
{{icon {{icon "search" class="rag-uploader__search-input__search-icon"}}
"search"
class="persona-rag-uploader__search-input__search-icon"
}}
<Input <Input
class="persona-rag-uploader__search-input__input" class="rag-uploader__search-input__input"
placeholder={{I18n.t "discourse_ai.ai_persona.uploads.filter"}} placeholder={{I18n.t "discourse_ai.rag.uploads.filter"}}
@value={{this.term}} @value={{this.term}}
{{on "keyup" this.debouncedSearch}} {{on "keyup" this.debouncedSearch}}
/> />
@ -133,24 +130,22 @@ export default class PersonaRagUploader extends Component.extend(
</div> </div>
{{/if}} {{/if}}
<table class="persona-rag-uploader__uploads-list"> <table class="rag-uploader__uploads-list">
<tbody> <tbody>
{{#each this.filteredUploads as |upload|}} {{#each this.filteredUploads as |upload|}}
<tr> <tr>
<td> <td>
<span class="persona-rag-uploader__rag-file-icon">{{icon <span class="rag-uploader__rag-file-icon">{{icon "file"}}</span>
"file"
}}</span>
{{upload.original_filename}} {{upload.original_filename}}
</td> </td>
<RagUploadProgress <RagUploadProgress
@upload={{upload}} @upload={{upload}}
@ragIndexingStatuses={{this.ragIndexingStatuses}} @ragIndexingStatuses={{this.ragIndexingStatuses}}
/> />
<td class="persona-rag-uploader__remove-file"> <td class="rag-uploader__remove-file">
<DButton <DButton
@icon="times" @icon="times"
@title="discourse_ai.ai_persona.uploads.remove" @title="discourse_ai.rag.uploads.remove"
@action={{fn this.removeUpload upload}} @action={{fn this.removeUpload upload}}
@class="btn-flat" @class="btn-flat"
/> />
@ -159,19 +154,19 @@ export default class PersonaRagUploader extends Component.extend(
{{/each}} {{/each}}
{{#each this.inProgressUploads as |upload|}} {{#each this.inProgressUploads as |upload|}}
<tr> <tr>
<td><span class="persona-rag-uploader__rag-file-icon">{{icon <td><span class="rag-uploader__rag-file-icon">{{icon
"file" "file"
}}</span> }}</span>
{{upload.original_filename}}</td> {{upload.original_filename}}</td>
<td class="persona-rag-uploader__upload-status"> <td class="rag-uploader__upload-status">
<div class="spinner small"></div> <div class="spinner small"></div>
<span>{{I18n.t "discourse_ai.ai_persona.uploads.uploading"}} <span>{{I18n.t "discourse_ai.rag.uploads.uploading"}}
{{upload.uploadProgress}}%</span> {{upload.uploadProgress}}%</span>
</td> </td>
<td class="persona-rag-uploader__remove-file"> <td class="rag-uploader__remove-file">
<DButton <DButton
@icon="times" @icon="times"
@title="discourse_ai.ai_persona.uploads.remove" @title="discourse_ai.rag.uploads.remove"
@action={{fn this.cancelUploading upload}} @action={{fn this.cancelUploading upload}}
@class="btn-flat" @class="btn-flat"
/> />
@ -189,9 +184,9 @@ export default class PersonaRagUploader extends Component.extend(
accept=".txt,.md" accept=".txt,.md"
/> />
<DButton <DButton
@label="discourse_ai.ai_persona.uploads.button" @label="discourse_ai.rag.uploads.button"
@icon="plus" @icon="plus"
@title="discourse_ai.ai_persona.uploads.button" @title="discourse_ai.rag.uploads.button"
@action={{this.submitFiles}} @action={{this.submitFiles}}
class="btn-default" class="btn-default"
/> />

View File

@ -74,82 +74,85 @@
display: flex; display: flex;
align-items: center; align-items: center;
} }
}
.rag-options {
&__indexing-options { &__indexing-options {
display: block; display: block;
margin-top: 1em; margin-top: 1em;
} margin-bottom: 1em;
}
.persona-rag-uploader { }
width: 500px;
.rag-uploader {
&__search-input { width: 500px;
display: flex;
align-items: center; &__search-input {
border: 1px solid var(--primary-400); display: flex;
width: 100%; align-items: center;
box-sizing: border-box; border: 1px solid var(--primary-400);
height: 35px; width: 100%;
padding: 0 0.5rem; box-sizing: border-box;
height: 35px;
&:focus, padding: 0 0.5rem;
&:focus-within {
@include default-focus(); &:focus,
} &:focus-within {
@include default-focus();
&-container { }
display: flex;
flex-grow: 1; &-container {
} display: flex;
flex-grow: 1;
&__search-icon { }
background: none !important;
color: var(--primary-medium); &__search-icon {
} background: none !important;
color: var(--primary-medium);
&__input { }
width: 100% !important;
} &__input {
width: 100% !important;
&__input, }
&__input:focus {
margin: 0 !important; &__input,
border: 0 !important; &__input:focus {
appearance: none !important; margin: 0 !important;
outline: none !important; border: 0 !important;
background: none !important; appearance: none !important;
} outline: none !important;
} background: none !important;
}
&__uploads-list { }
margin-bottom: 20px;
&__uploads-list {
tbody { margin-bottom: 20px;
border-top: none;
} tbody {
} border-top: none;
}
&__upload-status { }
text-align: right;
padding-right: 0; &__upload-status {
text-align: right;
.indexed { padding-right: 0;
color: var(--success);
} .indexed {
color: var(--success);
.uploaded, }
.indexing {
color: var(--primary-low-mid); .uploaded,
} .indexing {
} color: var(--primary-low-mid);
}
&__remove-file { }
text-align: right;
padding-left: 0; &__remove-file {
} text-align: right;
padding-left: 0;
&__rag-file-icon { }
margin-right: 5px;
} &__rag-file-icon {
margin-right: 5px;
} }
} }

View File

@ -139,8 +139,6 @@ en:
question_consolidator_llm: Language Model for Question Consolidator question_consolidator_llm: Language Model for Question Consolidator
question_consolidator_llm_help: The language model to use for the question consolidator, you may choose a less powerful model to save costs. question_consolidator_llm_help: The language model to use for the question consolidator, you may choose a less powerful model to save costs.
system_prompt: System Prompt system_prompt: System Prompt
show_indexing_options: "Show Upload Options"
hide_indexing_options: "Hide Upload Options"
allow_chat: "Allow Chat" allow_chat: "Allow Chat"
allow_chat_help: "If enabled, users in allowed groups can DM this persona" allow_chat_help: "If enabled, users in allowed groups can DM this persona"
save: Save save: Save
@ -160,10 +158,6 @@ en:
priority: Priority priority: Priority
priority_help: Priority personas are displayed to users at the top of the persona list. If multiple personas have priority, they will be sorted alphabetically. priority_help: Priority personas are displayed to users at the top of the persona list. If multiple personas have priority, they will be sorted alphabetically.
tool_options: "Tool Options" tool_options: "Tool Options"
rag_chunk_tokens: "Upload Chunk Tokens"
rag_chunk_tokens_help: "The number of tokens to use for each chunk in the RAG model. Increase to increase the amount of context the AI can use. (changing will re-index all uploads)"
rag_chunk_overlap_tokens: "Upload Chunk Overlap Tokens"
rag_chunk_overlap_tokens_help: "The number of tokens to overlap between chunks in the RAG model. (changing will re-index all uploads)"
rag_conversation_chunks: "Search Conversation Chunks" rag_conversation_chunks: "Search Conversation Chunks"
rag_conversation_chunks_help: "The number of chunks to use for the RAG model searches. Increase to increase the amount of context the AI can use." rag_conversation_chunks_help: "The number of chunks to use for the RAG model searches. Increase to increase the amount of context the AI can use."
what_are_personas: "What are AI Personas?" what_are_personas: "What are AI Personas?"
@ -178,9 +172,17 @@ en:
Moreover, you can set it up so that certain user groups have access to specific personas. This means you can have different AI behaviors for different sections of your forum, further enhancing the diversity and richness of your community's interactions. Moreover, you can set it up so that certain user groups have access to specific personas. This means you can have different AI behaviors for different sections of your forum, further enhancing the diversity and richness of your community's interactions.
rag:
options:
rag_chunk_tokens: "Upload Chunk Tokens"
rag_chunk_tokens_help: "The number of tokens to use for each chunk in the RAG model. Increase to increase the amount of context the AI can use. (changing will re-index all uploads)"
rag_chunk_overlap_tokens: "Upload Chunk Overlap Tokens"
rag_chunk_overlap_tokens_help: "The number of tokens to overlap between chunks in the RAG model. (changing will re-index all uploads)"
show_indexing_options: "Show Upload Options"
hide_indexing_options: "Hide Upload Options"
uploads: uploads:
title: "Uploads" title: "Uploads"
description: "Your AI persona will be able to search and reference the content of included files. Uploaded files should be formatted as plaintext (.txt) or markdown (.md)." description: "Uploaded files should be formatted as plaintext (.txt) or markdown (.md)."
button: "Add Files" button: "Add Files"
filter: "Filter uploads" filter: "Filter uploads"
indexed: "Indexed" indexed: "Indexed"

View File

@ -54,10 +54,15 @@ Discourse::Application.routes.draw do
) { post :test, on: :collection } ) { post :test, on: :collection }
post "/ai-personas/:id/create-user", to: "discourse_ai/admin/ai_personas#create_user" post "/ai-personas/:id/create-user", to: "discourse_ai/admin/ai_personas#create_user"
post "/ai-personas/files/upload", to: "discourse_ai/admin/ai_personas#upload_file"
put "/ai-personas/:id/files/remove", to: "discourse_ai/admin/ai_personas#remove_file" put "/ai-personas/:id/files/remove", to: "discourse_ai/admin/ai_personas#remove_file"
get "/ai-personas/:id/files/status", to: "discourse_ai/admin/ai_personas#indexing_status_check" get "/ai-personas/:id/files/status", to: "discourse_ai/admin/ai_personas#indexing_status_check"
post "/rag-document-fragments/files/upload",
to: "discourse_ai/admin/rag_document_fragments#upload_file"
get "/rag-document-fragments/files/status",
to: "discourse_ai/admin/rag_document_fragments#indexing_status_check"
resources :ai_llms, resources :ai_llms,
only: %i[index create show update destroy], only: %i[index create show update destroy],
path: "ai-llms", path: "ai-llms",

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
class AddRagColumnsToAiTools < ActiveRecord::Migration[7.1]
def change
add_column :ai_tools, :rag_chunk_tokens, :integer, null: false, default: 374
add_column :ai_tools, :rag_chunk_overlap_tokens, :integer, null: false, default: 10
end
end

View File

@ -35,6 +35,7 @@ module DiscourseAi
) )
attach_truncate(ctx) attach_truncate(ctx)
attach_http(ctx) attach_http(ctx)
attach_index(ctx)
ctx.eval(framework_script) ctx.eval(framework_script)
ctx ctx
end end
@ -50,6 +51,10 @@ module DiscourseAi
const llm = { const llm = {
truncate: _llm_truncate, truncate: _llm_truncate,
}; };
const index = {
search: _index_search,
}
function details() { return ""; }; function details() { return ""; };
JS JS
end end
@ -105,6 +110,49 @@ module DiscourseAi
private private
MAX_FRAGMENTS = 200
def rag_search(query, filenames: nil, limit: 10)
limit = limit.to_i
return [] if limit < 1
limit = [MAX_FRAGMENTS, limit].min
upload_refs =
UploadReference.where(target_id: tool.id, target_type: "AiTool").pluck(:upload_id)
if filenames
upload_refs = Upload.where(id: upload_refs).where(original_filename: filenames).pluck(:id)
end
return [] if upload_refs.empty?
strategy = DiscourseAi::Embeddings::Strategies::Truncation.new
vector_rep =
DiscourseAi::Embeddings::VectorRepresentations::Base.current_representation(strategy)
query_vector = vector_rep.vector_from(query)
fragment_ids =
vector_rep.asymmetric_rag_fragment_similarity_search(
query_vector,
target_type: "AiTool",
target_id: tool.id,
limit: limit,
offset: 0,
)
fragments =
RagDocumentFragment.where(id: fragment_ids, upload_id: upload_refs).pluck(
:id,
:fragment,
:metadata,
)
mapped = {}
fragments.each do |id, fragment, metadata|
mapped[id] = { fragment: fragment, metadata: metadata }
end
fragment_ids.take(limit).map { |fragment_id| mapped[fragment_id] }
end
def attach_truncate(mini_racer_context) def attach_truncate(mini_racer_context)
mini_racer_context.attach( mini_racer_context.attach(
"_llm_truncate", "_llm_truncate",
@ -112,6 +160,22 @@ module DiscourseAi
) )
end end
def attach_index(mini_racer_context)
mini_racer_context.attach(
"_index_search",
->(query, options) do
begin
self.running_attached_function = true
options ||= {}
options = options.symbolize_keys
self.rag_search(query, **options)
ensure
self.running_attached_function = false
end
end,
)
end
def attach_http(mini_racer_context) def attach_http(mini_racer_context)
mini_racer_context.attach( mini_racer_context.attach(
"_http_get", "_http_get",

View File

@ -38,9 +38,9 @@ RSpec.describe Jobs::GenerateRagEmbeddings do
describe "Publishing progress updates" do describe "Publishing progress updates" do
it "sends an update through mb after a batch finishes" do it "sends an update through mb after a batch finishes" do
updates = updates =
MessageBus.track_publish( MessageBus.track_publish("/discourse-ai/rag/#{rag_document_fragment_1.upload_id}") do
"/discourse-ai/ai-persona-rag/#{rag_document_fragment_1.upload_id}", subject.execute(fragment_ids: [rag_document_fragment_1.id])
) { subject.execute(fragment_ids: [rag_document_fragment_1.id]) } end
upload_index_stats = updates.last.data upload_index_stats = updates.last.data

View File

@ -4,7 +4,12 @@ RSpec.describe AiTool do
fab!(:llm_model) { Fabricate(:llm_model, name: "claude-2") } fab!(:llm_model) { Fabricate(:llm_model, name: "claude-2") }
let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") }
def create_tool(parameters: nil, script: nil) def create_tool(
parameters: nil,
script: nil,
rag_chunk_tokens: nil,
rag_chunk_overlap_tokens: nil
)
AiTool.create!( AiTool.create!(
name: "test", name: "test",
description: "test", description: "test",
@ -12,6 +17,8 @@ RSpec.describe AiTool do
script: script || "function invoke(params) { return params; }", script: script || "function invoke(params) { return params; }",
created_by_id: 1, created_by_id: 1,
summary: "Test tool summary", summary: "Test tool summary",
rag_chunk_tokens: rag_chunk_tokens || 374,
rag_chunk_overlap_tokens: rag_chunk_overlap_tokens || 10,
) )
end end
@ -193,4 +200,95 @@ RSpec.describe AiTool do
result = runner.invoke result = runner.invoke
expect(result[:error]).to eq("Script terminated due to timeout") expect(result[:error]).to eq("Script terminated due to timeout")
end end
context "when defining RAG fragments" do
before do
SiteSetting.authorized_extensions = "txt"
SiteSetting.ai_embeddings_enabled = true
SiteSetting.ai_embeddings_discourse_service_api_endpoint = "http://test.com"
SiteSetting.ai_embeddings_model = "bge-large-en"
Jobs.run_immediately!
end
def create_upload(content, filename)
upload = nil
Tempfile.create(filename) do |file|
file.write(content)
file.rewind
upload = UploadCreator.new(file, filename).create_for(Discourse.system_user.id)
end
upload
end
def stub_embeddings
# this is a trick, we get ever increasing embeddings, this gives us in turn
# 100% consistent search results
@counter = 0
stub_request(:post, "http://test.com/api/v1/classify").to_return(
status: 200,
body: lambda { |req| ([@counter += 1] * 1024).to_json },
headers: {
},
)
end
it "allows search within uploads" do
stub_embeddings
upload1 = create_upload(<<~TXT, "test.txt")
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
TXT
upload2 = create_upload(<<~TXT, "test.txt")
30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50
TXT
tool = create_tool(rag_chunk_tokens: 10, rag_chunk_overlap_tokens: 4, script: <<~JS)
function invoke(params) {
let result1 = index.search("testing a search", { limit: 1 });
let result2 = index.search("testing another search", { limit: 3, filenames: ["test.txt"] });
return [result1, result2];
}
JS
RagDocumentFragment.link_target_and_uploads(tool, [upload1.id, upload2.id])
result = tool.runner({}, llm: nil, bot_user: nil, context: {}).invoke
expected = [
[{ "fragment" => "44 45 46 47 48 49 50", "metadata" => nil }],
[
{ "fragment" => "44 45 46 47 48 49 50", "metadata" => nil },
{ "fragment" => "36 37 38 39 40 41 42 43 44 45", "metadata" => nil },
{ "fragment" => "30 31 32 33 34 35 36 37", "metadata" => nil },
],
]
expect(result).to eq(expected)
# will force a reindex
tool.rag_chunk_tokens = 5
tool.rag_chunk_overlap_tokens = 2
tool.save!
# this part of the API is a bit awkward, maybe we should do it
# automatically
RagDocumentFragment.update_target_uploads(tool, [upload1.id, upload2.id])
result = tool.runner({}, llm: nil, bot_user: nil, context: {}).invoke
expected = [
[{ "fragment" => "48 49 50", "metadata" => nil }],
[
{ "fragment" => "48 49 50", "metadata" => nil },
{ "fragment" => "45 46 47", "metadata" => nil },
{ "fragment" => "42 43 44", "metadata" => nil },
],
]
expect(result).to eq(expected)
end
end
end end

View File

@ -369,17 +369,6 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
end end
end end
describe "POST #upload_file" do
it "works" do
post "/admin/plugins/discourse-ai/ai-personas/files/upload.json",
params: {
file: Rack::Test::UploadedFile.new(file_from_fixtures("spec.txt", "md")),
}
expect(response.status).to eq(200)
end
end
describe "DELETE #destroy" do describe "DELETE #destroy" do
it "destroys the requested ai_persona" do it "destroys the requested ai_persona" do
expect { expect {

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true
RSpec.describe DiscourseAi::Admin::RagDocumentFragmentsController do
fab!(:admin)
fab!(:ai_persona)
before do
sign_in(admin)
SiteSetting.ai_embeddings_enabled = true
SiteSetting.ai_embeddings_discourse_service_api_endpoint = "http://test.com"
end
describe "GET #indexing_status_check" do
it "works for AiPersona" do
get "/admin/plugins/discourse-ai/rag-document-fragments/files/status.json?target_type=AiPersona&target_id=#{ai_persona.id}"
expect(response.parsed_body).to eq({})
expect(response.status).to eq(200)
end
end
describe "POST #upload_file" do
it "works" do
post "/admin/plugins/discourse-ai/rag-document-fragments/files/upload.json",
params: {
file: Rack::Test::UploadedFile.new(file_from_fixtures("spec.txt", "md")),
}
expect(response.status).to eq(200)
upload = Upload.last
expect(upload.original_filename).to end_with("spec.txt")
end
end
end