# frozen_string_literal: true
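
# Translates a generic DiscourseAi prompt into the chat message format used by
# the Ollama provider. Tool calls are serialized natively when the model is
# configured with the "enable_native_tool" custom parameter; otherwise the
# fallback tool handling inherited from Dialect is used.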
module DiscourseAi
  module Completions
    module Dialects
      class Ollama < Dialect
        class << self
          def can_translate?(model_provider)
            model_provider == "ollama"
          end
        end
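
        # Advertise native tool support only when the model opts in via the
        # enable_native_tool custom parameter.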
        def native_tool_support?
          enable_native_tool?
        end

        def max_prompt_tokens
          llm_model.max_prompt_tokens
        end

        private
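
        # With native tools enabled, OllamaTools serializes tool definitions and
        # calls; otherwise the default tools dialect from the base class is used.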
        def tools_dialect
          if enable_native_tool?
            @tools_dialect ||= DiscourseAi::Completions::Dialects::OllamaTools.new(prompt.tools)
          else
            super
          end
        end

        def tokenizer
          llm_model.tokenizer_class
        end

        def model_msg(msg)
          { role: "assistant", content: msg[:content] }
        end

        def tool_call_msg(msg)
          if enable_native_tool?
            tools_dialect.from_raw_tool_call(msg)
          else
            super
          end
        end

        def tool_msg(msg)
          if enable_native_tool?
            tools_dialect.from_raw_tool(msg)
          else
            super
          end
        end
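
        # Any tool-usage instructions exposed by the tools dialect are appended
        # to the system prompt.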
        def system_msg(msg)
          msg = { role: "system", content: msg[:content] }

          if tools_dialect.instructions.present?
            msg[:content] = msg[:content].dup << "\n\n#{tools_dialect.instructions}"
          end

          msg
        end
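
        # Memoized lookup of the "enable_native_tool" custom parameter on the
        # model; defined? is used so false/nil results are cached too.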
        def enable_native_tool?
          return @enable_native_tool if defined?(@enable_native_tool)

          @enable_native_tool = llm_model.lookup_custom_param("enable_native_tool")
        end
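
        # Image uploads are forwarded as base64-encoded strings under :images,
        # producing messages shaped like:
        #   { role: "user", content: "What is in this image?", images: ["<base64>"] }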
        def user_msg(msg)
          user_message = { role: "user", content: msg[:content] }

          encoded_uploads = prompt.encoded_uploads(msg)
          if encoded_uploads.present?
            images =
              encoded_uploads
                .map do |upload|
                  if upload[:mime_type].start_with?("image/")
                    upload[:base64]
                  else
                    nil
                  end
                end
                .compact

            user_message[:images] = images if images.present?
          end

          # TODO: Add support for user messages with embedded user ids

          user_message
        end
      end
    end
  end
end