diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml
index 5b4dee40..e490ef24 100644
--- a/config/locales/server.en.yml
+++ b/config/locales/server.en.yml
@@ -434,6 +434,7 @@ en:
       cannot_edit_builtin: "You can't edit a built-in model."
       configuration:
         disable_embeddings: "You have to disable 'ai embeddings enabled' first."
+        invalid_config: "You selected an invalid option."
         choose_model: "Set 'ai embeddings selected model' first."
 
     llm_models:
diff --git a/lib/configuration/embedding_defs_validator.rb b/lib/configuration/embedding_defs_validator.rb
index 600cf759..9e962d7d 100644
--- a/lib/configuration/embedding_defs_validator.rb
+++ b/lib/configuration/embedding_defs_validator.rb
@@ -8,10 +8,19 @@ module DiscourseAi
       end
 
       def valid_value?(val)
-        val.blank? || EmbeddingDefinition.exists?(id: val)
+        if val.blank?
+          @module_enabled = SiteSetting.ai_embeddings_enabled
+
+          !@module_enabled
+        else
+          EmbeddingDefinition.exists?(id: val).tap { |def_exists| @invalid_option = !def_exists }
+        end
       end
 
       def error_message
+        return I18n.t("discourse_ai.embeddings.configuration.disable_embeddings") if @module_enabled
+        return I18n.t("discourse_ai.embeddings.configuration.invalid_config") if @invalid_option
+
         ""
       end
     end
diff --git a/lib/embeddings/semantic_related.rb b/lib/embeddings/semantic_related.rb
index 76562fa4..5f8c8234 100644
--- a/lib/embeddings/semantic_related.rb
+++ b/lib/embeddings/semantic_related.rb
@@ -10,6 +10,7 @@ module DiscourseAi
 
       def related_topic_ids_for(topic)
         return [] if SiteSetting.ai_embeddings_semantic_related_topics < 1
+        return [] if SiteSetting.ai_embeddings_selected_model.blank? # fail-safe in case something ends up in a broken state.
 
         cache_for = results_ttl(topic)
diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb
index b28e9ef9..e21ff1e7 100644
--- a/lib/embeddings/semantic_search.rb
+++ b/lib/embeddings/semantic_search.rb
@@ -10,8 +10,8 @@ module DiscourseAi
           "semantic-search-#{digest}-#{SiteSetting.ai_embeddings_semantic_search_hyde_model}"
 
         Discourse.cache.delete(hyde_key)
-        Discourse.cache.delete("#{hyde_key}-#{SiteSetting.ai_embeddings_model}")
-        Discourse.cache.delete("-#{SiteSetting.ai_embeddings_model}")
+        Discourse.cache.delete("#{hyde_key}-#{SiteSetting.ai_embeddings_selected_model}")
+        Discourse.cache.delete("-#{SiteSetting.ai_embeddings_selected_model}")
       end
 
       def initialize(guardian)
@@ -24,7 +24,7 @@ module DiscourseAi
           build_embedding_key(
             digest,
             SiteSetting.ai_embeddings_semantic_search_hyde_model,
-            SiteSetting.ai_embeddings_model,
+            SiteSetting.ai_embeddings_selected_model,
           )
 
         Discourse.cache.read(embedding_key).present?
@@ -42,7 +42,7 @@ module DiscourseAi
           build_embedding_key(
             digest,
             SiteSetting.ai_embeddings_semantic_search_hyde_model,
-            SiteSetting.ai_embeddings_model,
+            SiteSetting.ai_embeddings_selected_model,
           )
 
         hypothetical_post =
@@ -57,7 +57,7 @@ module DiscourseAi
       def embedding(search_term)
         digest = OpenSSL::Digest::SHA1.hexdigest(search_term)
-        embedding_key = build_embedding_key(digest, "", SiteSetting.ai_embeddings_model)
+        embedding_key = build_embedding_key(digest, "", SiteSetting.ai_embeddings_selected_model)
 
         Discourse.cache.fetch(embedding_key, expires_in: 1.week) { vector.vector_from(search_term) }
       end
@@ -120,7 +120,7 @@ module DiscourseAi
           build_embedding_key(
             digest,
             SiteSetting.ai_embeddings_semantic_search_hyde_model,
-            SiteSetting.ai_embeddings_model,
+            SiteSetting.ai_embeddings_selected_model,
           )
 
         search_term_embedding =
diff --git a/spec/models/rag_document_fragment_spec.rb b/spec/models/rag_document_fragment_spec.rb
index bb77f3de..31a43cbc 100644
--- a/spec/models/rag_document_fragment_spec.rb
+++ b/spec/models/rag_document_fragment_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe RagDocumentFragment do
       vector.generate_representation_from(rag_document_fragment_1)
     end
 
-    it "regenerates all embeddings if ai_embeddings_model changes" do
+    it "regenerates all embeddings if ai_embeddings_selected_model changes" do
      old_id = rag_document_fragment_1.id
 
      UploadReference.create!(upload_id: upload_1.id, target: persona)
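For reviewers, here is a minimal standalone Ruby sketch (not part of the patch) of the branching that the new `EmbeddingDefsValidator#valid_value?` / `#error_message` pair implements: a blank value only passes while the embeddings module is disabled, and a non-blank value has to match an existing `EmbeddingDefinition`. The `validate_selected_model` helper, its keyword arguments, and the symbolic return values are hypothetical, introduced purely for illustration; the real validator relies on `SiteSetting`, `EmbeddingDefinition`, and `I18n` as shown in the diff.

```ruby
# Hypothetical standalone sketch of the validator's logic (illustration only).
def validate_selected_model(value, embeddings_enabled:, known_definition_ids:)
  if value.to_s.strip.empty? # stand-in for ActiveSupport's blank?
    # Clearing the setting is rejected while 'ai embeddings enabled' is on,
    # which maps to the disable_embeddings message.
    embeddings_enabled ? [false, :disable_embeddings] : [true, nil]
  else
    # An id that doesn't match a known embedding definition maps to the new
    # invalid_config message.
    known_definition_ids.include?(value) ? [true, nil] : [false, :invalid_config]
  end
end

p validate_selected_model("", embeddings_enabled: true, known_definition_ids: [1, 2])
# => [false, :disable_embeddings]
p validate_selected_model(3, embeddings_enabled: false, known_definition_ids: [1, 2])
# => [false, :invalid_config]
p validate_selected_model(2, embeddings_enabled: true, known_definition_ids: [1, 2])
# => [true, nil]
```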