mirror of
https://github.com/discourse/discourse-ai.git
synced 2025-07-24 06:53:41 +00:00
FIX: occurrences of old model setting
This commit is contained in:
parent
7af3ce820d
commit
273a1fa618
@@ -6,8 +6,8 @@ module DiscourseAi
|
||||
def self.clear_cache_for(query)
|
||||
digest = OpenSSL::Digest::SHA1.hexdigest(query)
|
||||
|
||||
hyde_key =
|
||||
"semantic-search-#{digest}-#{SiteSetting.ai_embeddings_semantic_search_hyde_model}"
|
||||
hyde_model_id = find_ai_hyde_model_id
|
||||
hyde_key = "semantic-search-#{digest}-#{hyde_model_id}"
|
||||
|
||||
Discourse.cache.delete(hyde_key)
|
||||
Discourse.cache.delete("#{hyde_key}-#{SiteSetting.ai_embeddings_selected_model}")
|
||||
@@ -20,12 +20,9 @@ module DiscourseAi
|
||||
|
||||
# Returns true when a HyDE-augmented embedding for +query+ is already
# cached, so callers can skip regenerating it.
#
# NOTE(review): the diff residue here interleaved the removed key built from
# SiteSetting.ai_embeddings_semantic_search_hyde_model with the added key
# built from find_ai_hyde_model_id; read literally, the old key was assigned
# and the new one discarded. This is the reconstructed post-commit body.
def cached_query?(query)
  digest = OpenSSL::Digest::SHA1.hexdigest(query)
  hyde_model_id = find_ai_hyde_model_id
  embedding_key =
    build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model)

  Discourse.cache.read(embedding_key).present?
end
|
||||
@@ -36,14 +33,11 @@ module DiscourseAi
|
||||
|
||||
def hyde_embedding(search_term)
|
||||
digest = OpenSSL::Digest::SHA1.hexdigest(search_term)
|
||||
hyde_key = build_hyde_key(digest, SiteSetting.ai_embeddings_semantic_search_hyde_model)
|
||||
hyde_model_id = find_ai_hyde_model_id
|
||||
hyde_key = build_hyde_key(digest, hyde_model_id)
|
||||
|
||||
embedding_key =
|
||||
build_embedding_key(
|
||||
digest,
|
||||
SiteSetting.ai_embeddings_semantic_search_hyde_model,
|
||||
SiteSetting.ai_embeddings_selected_model,
|
||||
)
|
||||
build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model)
|
||||
|
||||
hypothetical_post =
|
||||
Discourse
|
||||
@@ -111,6 +105,7 @@ module DiscourseAi
|
||||
max_semantic_results_per_page = 100
|
||||
search = Search.new(query, { guardian: guardian })
|
||||
search_term = search.term
|
||||
hyde_model_id = find_ai_hyde_model_id
|
||||
|
||||
return [] if search_term.nil? || search_term.length < SiteSetting.min_search_term_length
|
||||
|
||||
@@ -119,11 +114,7 @@ module DiscourseAi
|
||||
digest = OpenSSL::Digest::SHA1.hexdigest(search_term)
|
||||
|
||||
embedding_key =
|
||||
build_embedding_key(
|
||||
digest,
|
||||
SiteSetting.ai_embeddings_semantic_search_hyde_model,
|
||||
SiteSetting.ai_embeddings_selected_model,
|
||||
)
|
||||
build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model)
|
||||
|
||||
search_term_embedding =
|
||||
Discourse
|
||||
@@ -221,6 +212,19 @@ module DiscourseAi
|
||||
end
|
||||
end
|
||||
|
||||
# Resolves which LLM id should generate hypothetical documents (HyDE):
# 1. the default LLM of the configured HyDE persona, when present;
# 2. otherwise the site-wide default LLM setting;
# 3. otherwise the most recently created LlmModel.
#
# FIX: the original wrote `SiteSetting.ai_default_llm_model.to_i ||
# LlmModel.last&.id` — `to_i` never returns nil, so the `LlmModel` fallback
# was dead code and a blank setting produced the invalid id 0.
# `presence&.to_i` restores the intended three-step fallback.
def find_ai_hyde_model_id
  persona_llm_id =
    AiPersona.find_by(
      id: SiteSetting.ai_embeddings_semantic_search_hyde_persona,
    )&.default_llm_id

  return persona_llm_id if persona_llm_id.present?

  SiteSetting.ai_default_llm_model.presence&.to_i || LlmModel.last&.id
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :guardian
|
||||
|
@@ -88,7 +88,7 @@ namespace :ai do
|
||||
messages: [{ type: :user, content: prompt, id: "user" }],
|
||||
)
|
||||
|
||||
DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate(
|
||||
DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate(
|
||||
prompt,
|
||||
user: Discourse.system_user,
|
||||
feature_name: "topic-generator",
|
||||
|
Loading…
x
Reference in New Issue
Block a user