# frozen_string_literal: true

module DiscourseAi
  module Completions
    module Dialects
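      # Fallback dialect for models exposed through an OpenAI-compatible API;
      # message translation is inherited from the ChatGpt dialect.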
      class OpenAiCompatible < ChatGpt
        class << self
          def can_translate?(_llm_model)
            # fallback dialect
            true
          end
        end
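
        # Prefer the tokenizer class configured on the LLM model; fall back to
        # the Llama 3 tokenizer when none is set.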
        def tokenizer
          llm_model&.tokenizer_class || DiscourseAi::Tokenizer::Llama3Tokenizer
        end
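
        # Translate tool definitions once and memoize the result.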
        def tools
          @tools ||= tools_dialect.translated_tools
        end
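
        # Use the token limit configured on the model when present; otherwise
        # assume a 32k context window.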
        def max_prompt_tokens
          return llm_model.max_prompt_tokens if llm_model&.max_prompt_tokens

          32_000
        end
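
        # Builds the message list via the parent ChatGpt dialect. When the model
        # sets the "disable_system_prompt" custom param, the system prompt is
        # merged into the first user message instead of being sent separately.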
        def translate
          translated = super

          return translated unless llm_model.lookup_custom_param("disable_system_prompt")

          system_msg, user_msg = translated.shift(2)

          if user_msg[:content].is_a?(Array) # Has inline images.
            user_msg[:content].first[:text] = [
              system_msg[:content],
              user_msg[:content].first[:text],
            ].join("\n")
          else
            user_msg[:content] = [system_msg[:content], user_msg[:content]].join("\n")
          end

          translated.unshift(user_msg)
        end
      end
    end
  end
end