# frozen_string_literal: true

# see: https://docs.cohere.com/reference/chat
#
module DiscourseAi
  module Completions
    module Dialects
      class Command < Dialect
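        # any model whose provider is configured as "cohere" is handled by this dialect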
        def self.can_translate?(llm_model)
          llm_model.provider == "cohere"
        end

        VALID_ID_REGEX = /\A[a-zA-Z0-9_]+\z/
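
        # Converts the prompt produced by the base Dialect into the payload shape expected
        # by Cohere's Chat API (see https://docs.cohere.com/reference/chat). Roughly:
        #
        #   {
        #     preamble: "system instructions",
        #     chat_history: [{ role: "USER", message: "..." }, { role: "CHATBOT", message: "..." }],
        #     message: "most recent user turn",
        #     tools: [...],         # only when tools are defined
        #     tool_results: [...],  # only when the conversation ends with a tool response
        #   }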
        def translate
          messages = super
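
          # a leading SYSTEM message is lifted out of the history and used as the preamble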
          system_message = messages.shift[:message] if messages.first[:role] == "SYSTEM"

          prompt = { preamble: +"#{system_message}" }

          if messages.present?
            with_mapped_tools = []
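
            # pair each :tool_call message with the :tool result that follows it and collapse
            # the pair into a single TOOL entry in chat_history (its payload lives in
            # tool_results)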
            current_pair = nil
            messages.each do |msg|
              if current_pair == nil && msg[:type] == :tool_call
                current_pair = [msg]
              elsif current_pair && msg[:type] == :tool
                current_pair << msg
                tool_results = tools_dialect.tool_results(current_pair)
                with_mapped_tools << { role: "TOOL", message: "", tool_results: tool_results }
                current_pair = nil
              else
                with_mapped_tools << msg
                current_pair = nil
              end
            end

            messages = with_mapped_tools
            prompt[:chat_history] = messages
          end

          tools = tools_dialect.translated_tools
          prompt[:tools] = tools if tools.present?
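
          # if the conversation ends with a TOOL turn, surface its results in the request's
          # top-level tool_results field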
          tool_results =
            messages.last && messages.last[:role] == "TOOL" && messages.last[:tool_results]
          prompt[:tool_results] = tool_results if tool_results.present?
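
          # otherwise the most recent USER turn is pulled out of chat_history and sent as the
          # standalone message field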
          if tool_results.blank?
            messages.reverse_each do |msg|
              if msg[:role] == "USER"
                prompt[:message] = msg[:message]
                messages.delete(msg)
                break
              end
            end
          end

          prompt
        end

        def max_prompt_tokens
          llm_model.max_prompt_tokens
        end

        def native_tool_support?
          true
        end

        private

        def tools_dialect
          @tools_dialect ||= DiscourseAi::Completions::Dialects::CohereTools.new(prompt.tools)
        end

        def per_message_overhead
          0
        end
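
        # token footprint of a single message: its content plus the author name, measured
        # with the model's configured tokenizer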
        def calculate_message_token(context)
          llm_model.tokenizer_class.size(context[:content].to_s + context[:name].to_s)
        end
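
        # when tools are in play, the tool instructions (plus a reminder to emit XML rather
        # than JSON tool calls) are appended to the system prompt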
        def system_msg(msg)
          cmd_msg = { role: "SYSTEM", message: msg[:content] }

          if tools_dialect.instructions.present?
            cmd_msg[:message] = [
              msg[:content],
              tools_dialect.instructions,
              "NEVER attempt to run tools using JSON, always use XML. Lives depend on it.",
            ].join("\n")
          end

          cmd_msg
        end

        def model_msg(msg)
          { role: "CHATBOT", message: msg[:content] }
        end
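
        # tool calls and tool results pass through untouched here; translate pairs them up
        # into TOOL turns afterwards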
        def tool_call_msg(msg)
          msg
        end

        def tool_msg(msg)
          msg
        end
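
        # user turns may carry an id (typically the participant's name); it is prefixed onto
        # the message text since it is not sent as a separate field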
        def user_msg(msg)
          user_message = { role: "USER", message: msg[:content] }
          user_message[:message] = "#{msg[:id]}: #{msg[:content]}" if msg[:id]

          user_message
        end
      end
    end
  end
end