# frozen_string_literal: true
module DiscourseAi
  module Completions
    module Dialects
      # Dialect that renders prompts in the "Orca" style used by
      # StableBeluga2 and Upstage's Llama-2 instruct fine-tunes:
      # plain-text "### System:" / "### User:" / "### Assistant:" sections.
      class OrcaStyle < Dialect
        class << self
          # @param model_name [String] model identifier to check
          # @return [Boolean] whether this dialect formats prompts for the model
          def can_translate?(model_name)
            %w[StableBeluga2 Upstage-Llama-2-*-instruct-v2].include?(model_name)
          end

          # @return [Class] tokenizer used to count and trim prompt tokens
          def tokenizer
            DiscourseAi::Tokenizer::Llama2Tokenizer
          end
        end

        # Renders the prompt messages as a single Orca-style string.
        #
        # @return [String] the formatted prompt, always ending with an open
        #   "### Assistant:\n" section for the model to complete
        def translate
          messages = prompt.messages

          # Drop older messages until the prompt fits within max_prompt_tokens.
          trimmed_messages = trim_messages(messages)

          # A trailing model message is a partial assistant response the model
          # should continue, so it is popped here and appended after the final
          # "### Assistant:" header instead of being rendered as a closed turn.
          # FIX: messages use the :model type (see the reduce below); the
          # original compared against :assistant, which never matched, leaving
          # this branch dead.
          last_message = trimmed_messages.last[:type] == :model ? trimmed_messages.pop : nil

          llama2_prompt =
            trimmed_messages.reduce(+"") do |memo, msg|
              # Tool invocations are only surfaced via their :tool results.
              next(memo) if msg[:type] == :tool_call

              if msg[:type] == :system
                memo << (<<~TEXT).strip
                  ### System:
                  #{msg[:content]}
                  #{build_tools_prompt}
                TEXT
              elsif msg[:type] == :model
                memo << "\n### Assistant:\n#{msg[:content]}"
              elsif msg[:type] == :tool
                # Tool results are rendered as an assistant turn wrapping the
                # XML payload the model was instructed to expect.
                memo << "\n### Assistant:\n"

                memo << (<<~TEXT).strip
                  <function_results>
                  <result>
                  <tool_name>#{msg[:id]}</tool_name>
                  <json>
                  #{msg[:content]}
                  </json>
                  </result>
                  </function_results>
                TEXT
              else
                memo << "\n### User:\n#{msg[:content]}"
              end

              memo
            end

          llama2_prompt << "\n### Assistant:\n"
          # Seed the open assistant section with the partial response, if any.
          # FIX: dropped the stray trailing ":" the original appended after the
          # content — it was leftover from the section-header formatting.
          llama2_prompt << last_message[:content] if last_message

          llama2_prompt
        end

        # @return [Integer] prompt token budget, taken from site settings
        def max_prompt_tokens
          SiteSetting.ai_hugging_face_token_limit
        end
      end
    end
  end
end