diff --git a/lib/modules/embeddings/hyde_generators/anthropic.rb b/lib/modules/embeddings/hyde_generators/anthropic.rb
index 72d36dac..7a1e87ff 100644
--- a/lib/modules/embeddings/hyde_generators/anthropic.rb
+++ b/lib/modules/embeddings/hyde_generators/anthropic.rb
@@ -6,12 +6,14 @@ module DiscourseAi
class Anthropic < DiscourseAi::Embeddings::HydeGenerators::Base
def prompt(search_term)
<<~TEXT
- Given a search term given between <input> tags, generate a forum post about a given subject.
- #{basic_prompt_instruction}
- <input>#{search_term}</input>
+ Human: Given a search term given between <input> tags, generate a forum post about a given subject.
+ #{basic_prompt_instruction}
+ <input>#{search_term}</input>
- Respond with the generated post between <ai> tags.
- TEXT
+ Respond with the generated post between <ai> tags.
+
+ Assistant:\n
+ TEXT
end
def models
@@ -24,6 +26,7 @@ module DiscourseAi
prompt(query),
SiteSetting.ai_embeddings_semantic_search_hyde_model,
max_tokens: 400,
+ stop_sequences: ["</ai>"],
).dig(:completion)
Nokogiri::HTML5.fragment(response).at("ai").text
diff --git a/lib/shared/inference/anthropic_completions.rb b/lib/shared/inference/anthropic_completions.rb
index f6f2b920..43bfa2d1 100644
--- a/lib/shared/inference/anthropic_completions.rb
+++ b/lib/shared/inference/anthropic_completions.rb
@@ -12,7 +12,8 @@ module ::DiscourseAi
temperature: nil,
top_p: nil,
max_tokens: nil,
- user_id: nil
+ user_id: nil,
+ stop_sequences: nil
)
log = nil
response_data = +""
@@ -31,6 +32,7 @@ module ::DiscourseAi
payload[:max_tokens_to_sample] = max_tokens || 2000
payload[:temperature] = temperature if temperature
payload[:stream] = true if block_given?
+ payload[:stop_sequences] = stop_sequences if stop_sequences
Net::HTTP.start(
url.host,