2023-11-23 10:58:54 -05:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
|
|
|
module DiscourseAi
|
|
|
|
module Completions
|
|
|
|
module Endpoints
|
|
|
|
class Anthropic < Base
|
2024-07-30 12:44:57 -04:00
|
|
|
def self.can_contact?(model_provider)
|
|
|
|
model_provider == "anthropic"
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
|
2024-01-04 07:53:47 -05:00
|
|
|
def normalize_model_params(model_params)
|
2024-03-18 15:48:46 -04:00
|
|
|
# max_tokens, temperature, stop_sequences are already supported
|
2024-01-04 07:53:47 -05:00
|
|
|
model_params
|
|
|
|
end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
def default_options(dialect)
|
|
|
|
mapped_model =
|
2024-07-30 12:44:57 -04:00
|
|
|
case llm_model.name
|
2024-03-18 15:48:46 -04:00
|
|
|
when "claude-2"
|
|
|
|
"claude-2.1"
|
|
|
|
when "claude-instant-1"
|
|
|
|
"claude-instant-1.2"
|
|
|
|
when "claude-3-haiku"
|
|
|
|
"claude-3-haiku-20240307"
|
|
|
|
when "claude-3-sonnet"
|
|
|
|
"claude-3-sonnet-20240229"
|
|
|
|
when "claude-3-opus"
|
|
|
|
"claude-3-opus-20240229"
|
2024-06-21 03:32:15 -04:00
|
|
|
when "claude-3-5-sonnet"
|
2024-10-24 15:24:53 -04:00
|
|
|
"claude-3-5-sonnet-latest"
|
2024-03-18 15:48:46 -04:00
|
|
|
else
|
2024-07-30 12:44:57 -04:00
|
|
|
llm_model.name
|
2024-03-18 15:48:46 -04:00
|
|
|
end
|
|
|
|
|
2024-11-18 17:22:39 -05:00
|
|
|
# Note: Anthropic requires this param
|
|
|
|
max_tokens = 4096
|
|
|
|
max_tokens = 8192 if mapped_model.match?(/3.5/)
|
|
|
|
|
|
|
|
options = { model: mapped_model, max_tokens: max_tokens }
|
2024-06-07 09:52:01 -04:00
|
|
|
|
|
|
|
options[:stop_sequences] = ["</function_calls>"] if !dialect.native_tool_support? &&
|
|
|
|
dialect.prompt.has_tools?
|
|
|
|
|
|
|
|
options
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
        # Identifier used to tag AiApiAuditLog rows produced by this endpoint.
        def provider_id
          AiApiAuditLog::Provider::Anthropic
        end
|
|
|
|
|
|
|
|
private
|
|
|
|
|
2024-06-07 09:52:01 -04:00
|
|
|
def xml_tags_to_strip(dialect)
|
|
|
|
if dialect.prompt.has_tools?
|
|
|
|
%w[thinking search_quality_reflection search_quality_score]
|
|
|
|
else
|
|
|
|
[]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
# this is an approximation, we will update it later if request goes through
|
|
|
|
def prompt_size(prompt)
|
2024-05-07 09:02:16 -04:00
|
|
|
tokenizer.size(prompt.system_prompt.to_s + " " + prompt.messages.to_s)
|
2024-03-18 15:48:46 -04:00
|
|
|
end
|
|
|
|
|
2023-11-23 10:58:54 -05:00
|
|
|
        # Request URI for this model, taken verbatim from the LlmModel record.
        def model_uri
          URI(llm_model.url)
        end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
        # True when tool use must be emulated via XML tags instead of the
        # native tool-use API. @native_tool_support is captured from the
        # dialect in prepare_payload.
        def xml_tools_enabled?
          !@native_tool_support
        end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
def prepare_payload(prompt, model_params, dialect)
|
2024-06-07 09:52:01 -04:00
|
|
|
@native_tool_support = dialect.native_tool_support?
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
payload = default_options(dialect).merge(model_params).merge(messages: prompt.messages)
|
|
|
|
|
|
|
|
payload[:system] = prompt.system_prompt if prompt.system_prompt.present?
|
|
|
|
payload[:stream] = true if @streaming_mode
|
2024-10-24 15:24:53 -04:00
|
|
|
if prompt.has_tools?
|
|
|
|
payload[:tools] = prompt.tools
|
|
|
|
if dialect.tool_choice.present?
|
|
|
|
payload[:tool_choice] = { type: "tool", name: dialect.tool_choice }
|
|
|
|
end
|
|
|
|
end
|
2024-03-18 15:48:46 -04:00
|
|
|
|
|
|
|
payload
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
def prepare_request(payload)
|
|
|
|
headers = {
|
|
|
|
"anthropic-version" => "2023-06-01",
|
2024-07-30 12:44:57 -04:00
|
|
|
"x-api-key" => llm_model.api_key,
|
2023-11-23 10:58:54 -05:00
|
|
|
"content-type" => "application/json",
|
|
|
|
}
|
|
|
|
|
|
|
|
Net::HTTP::Post.new(model_uri, headers).tap { |r| r.body = payload }
|
|
|
|
end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
def decode_chunk(partial_data)
|
|
|
|
@decoder ||= JsonStreamDecoder.new
|
|
|
|
(@decoder << partial_data)
|
|
|
|
.map { |parsed_json| processor.process_streamed_message(parsed_json) }
|
|
|
|
.compact
|
2024-06-05 18:34:23 -04:00
|
|
|
end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
        # Decodes a complete (non-streaming) API response body via the
        # message processor.
        def decode(response_data)
          processor.process_message(response_data)
        end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
        # Memoized processor that parses Anthropic response payloads; it also
        # accumulates tool calls and token counts consumed by tool_calls,
        # has_tool? and final_log_update.
        def processor
          @processor ||=
            DiscourseAi::Completions::AnthropicMessageProcessor.new(
              streaming_mode: @streaming_mode,
              partial_tool_calls: partial_tool_calls,
            )
        end
|
|
|
|
|
|
|
|
        # True when the processed response contained at least one tool call.
        # The response-data argument is ignored; state lives in the processor.
        def has_tool?(_response_data)
          processor.tool_calls.present?
        end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
        # Tool calls accumulated during processing, converted by the
        # processor into the completions-layer representation.
        def tool_calls
          processor.to_tool_calls
        end
|
|
|
|
|
2024-06-05 18:34:23 -04:00
|
|
|
def final_log_update(log)
|
|
|
|
log.request_tokens = processor.input_tokens if processor.input_tokens
|
|
|
|
log.response_tokens = processor.output_tokens if processor.output_tokens
|
|
|
|
end
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|