Mirror of https://github.com/discourse/discourse-ai.git, synced 2025-10-27 04:28:38 +00:00
Introduce a Discourse Automation based periodic report. Depends on Discourse Automation. The report works best with very large context language models such as GPT-4-Turbo and Claude 2.

- Introduces final_insts to the generic LLM prompt format; for Claude to work best it helps to guide the last assistant message (we should add this to other spots as well)
- Adds GPT-4 Turbo support to the generic LLM interface
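For reference, a minimal sketch of how the new final_insts key flows through the Claude dialect in this file. The constructor call is an assumption (the Dialect base class appears to take the generic prompt hash and a model name); the resulting string follows the translate method below.

# Sketch only: the constructor arguments are assumed from the Dialect base
# class; the prompt hash keys are the ones #translate reads below.
prompt = {
  insts: "You compile a weekly activity report for staff.",
  input: "Summarise the last 7 days of forum activity.",
  post_insts: "Reply in markdown.",
  final_insts: "Here is the report in markdown",
}

dialect = DiscourseAi::Completions::Dialects::Claude.new(prompt, "claude-2")
dialect.translate
# => "Human: You compile a weekly activity report for staff.\n" \
#    "Summarise the last 7 days of forum activity.\n" \
#    "Reply in markdown.\n" \
#    "Assistant: Here is the report in markdown:\n"

Pre-seeding the trailing Assistant: turn this way is what the commit message refers to as guiding the last assistant message.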
80 lines · 2.1 KiB · Ruby
# frozen_string_literal: true

module DiscourseAi
  module Completions
    module Dialects
      # Translates the generic prompt format into Anthropic's text-completion
      # (Human:/Assistant:) prompt format.
      class Claude < Dialect
        class << self
          def can_translate?(model_name)
            %w[claude-instant-1 claude-2].include?(model_name)
          end

          def tokenizer
            DiscourseAi::Tokenizer::AnthropicTokenizer
          end
        end

        def translate
          claude_prompt = +"Human: #{prompt[:insts]}\n"

          claude_prompt << build_tools_prompt if prompt[:tools]

          claude_prompt << build_examples(prompt[:examples]) if prompt[:examples]

          claude_prompt << conversation_context if prompt[:conversation_context]

          claude_prompt << "#{prompt[:input]}\n"

          claude_prompt << "#{prompt[:post_insts]}\n" if prompt[:post_insts]

          # final_insts pre-seeds the trailing assistant turn to guide the reply.
          claude_prompt << "Assistant:"
          claude_prompt << " #{prompt[:final_insts]}:" if prompt[:final_insts]
          claude_prompt << "\n"
        end

        def max_prompt_tokens
          50_000
        end

        def conversation_context
          return "" if prompt[:conversation_context].blank?

          trimmed_context = trim_context(prompt[:conversation_context])

          trimmed_context
            .reverse
            .reduce(+"") do |memo, context|
              memo << (context[:type] == "user" ? "Human:" : "Assistant:")

              # Tool results are wrapped in the <function_results> XML block;
              # everything else is appended as plain text.
              if context[:type] == "tool"
                memo << <<~TEXT

                  <function_results>
                  <result>
                  <tool_name>#{context[:name]}</tool_name>
                  <json>
                  #{context[:content]}
                  </json>
                  </result>
                  </function_results>
                TEXT
              else
                memo << " " << context[:content] << "\n"
              end

              memo
            end
        end

        private

        # Few-shot examples are rendered as H:/A: pairs inside <example> tags.
        def build_examples(examples_arr)
          examples_arr.reduce("") do |memo, example|
            memo += "<example>\nH: #{example[0]}\nA: #{example[1]}\n</example>\n"
          end
        end
      end
    end
  end
end
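As a usage note, the fragments conversation_context appends per entry type look roughly like this; the :type, :name and :content keys are the ones the method reads above, while the sample values are hypothetical.

# Sketch only: entry keys (:type, :name, :content) mirror the reads in
# #conversation_context; the values are illustrative.
user_entry = { type: "user", content: "What is the weather in Sydney?" }
# contributes: "Human: What is the weather in Sydney?\n"

tool_entry = { type: "tool", name: "weather", content: %({"temperature": 21}) }
# contributes "Assistant:" followed by the XML wrapper built in the heredoc:
#
#   Assistant:
#   <function_results>
#   <result>
#   <tool_name>weather</tool_name>
#   <json>
#   {"temperature": 21}
#   </json>
#   </result>
#   </function_results>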