2023-11-23 10:58:54 -05:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2023-12-18 14:06:06 -05:00
|
|
|
require "aws-sigv4"
|
|
|
|
|
2023-11-23 10:58:54 -05:00
|
|
|
module DiscourseAi
|
|
|
|
module Completions
|
|
|
|
module Endpoints
|
|
|
|
class AwsBedrock < Base
|
2024-12-05 15:45:58 -05:00
|
|
|
attr_reader :dialect
|
|
|
|
|
2024-07-30 12:44:57 -04:00
|
|
|
def self.can_contact?(model_provider)
|
|
|
|
model_provider == "aws_bedrock"
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
|
2024-01-04 07:53:47 -05:00
|
|
|
def normalize_model_params(model_params)
|
|
|
|
model_params = model_params.dup
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
# max_tokens, temperature, stop_sequences, top_p are already supported
|
2024-01-04 07:53:47 -05:00
|
|
|
|
|
|
|
model_params
|
|
|
|
end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
def default_options(dialect)
|
2024-12-05 15:45:58 -05:00
|
|
|
options =
|
|
|
|
if dialect.is_a?(DiscourseAi::Completions::Dialects::Claude)
|
|
|
|
max_tokens = 4096
|
|
|
|
max_tokens = 8192 if bedrock_model_id.match?(/3.5/)
|
|
|
|
|
|
|
|
{ max_tokens: max_tokens, anthropic_version: "bedrock-2023-05-31" }
|
|
|
|
else
|
|
|
|
{}
|
|
|
|
end
|
2024-06-07 09:52:01 -04:00
|
|
|
|
|
|
|
options[:stop_sequences] = ["</function_calls>"] if !dialect.native_tool_support? &&
|
|
|
|
dialect.prompt.has_tools?
|
2024-03-18 15:48:46 -04:00
|
|
|
options
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
        # Audit-log provider tag for requests made through this endpoint.
        # NOTE(review): always logged as Anthropic even though this endpoint
        # also serves Nova dialects (see #prepare_payload) — confirm intended.
        def provider_id
          AiApiAuditLog::Provider::Anthropic
        end
|
|
|
|
|
2024-06-07 09:52:01 -04:00
|
|
|
def xml_tags_to_strip(dialect)
|
|
|
|
if dialect.prompt.has_tools?
|
|
|
|
%w[thinking search_quality_reflection search_quality_score]
|
|
|
|
else
|
|
|
|
[]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2023-11-23 10:58:54 -05:00
|
|
|
private
|
|
|
|
|
2024-11-19 01:28:09 -05:00
|
|
|
def bedrock_model_id
|
|
|
|
case llm_model.name
|
|
|
|
when "claude-2"
|
|
|
|
"anthropic.claude-v2:1"
|
|
|
|
when "claude-3-haiku"
|
|
|
|
"anthropic.claude-3-haiku-20240307-v1:0"
|
|
|
|
when "claude-3-sonnet"
|
|
|
|
"anthropic.claude-3-sonnet-20240229-v1:0"
|
|
|
|
when "claude-instant-1"
|
|
|
|
"anthropic.claude-instant-v1"
|
|
|
|
when "claude-3-opus"
|
|
|
|
"anthropic.claude-3-opus-20240229-v1:0"
|
|
|
|
when "claude-3-5-sonnet"
|
|
|
|
"anthropic.claude-3-5-sonnet-20241022-v2:0"
|
|
|
|
when "claude-3-5-haiku"
|
|
|
|
"anthropic.claude-3-5-haiku-20241022-v1:0"
|
|
|
|
else
|
|
|
|
llm_model.name
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
def prompt_size(prompt)
|
|
|
|
# approximation
|
2024-05-07 09:02:16 -04:00
|
|
|
tokenizer.size(prompt.system_prompt.to_s + " " + prompt.messages.to_s)
|
2024-03-18 15:48:46 -04:00
|
|
|
end
|
|
|
|
|
2023-11-23 10:58:54 -05:00
|
|
|
def model_uri
|
2024-07-30 12:44:57 -04:00
|
|
|
region = llm_model.lookup_custom_param("region")
|
|
|
|
|
2024-08-07 15:08:56 -04:00
|
|
|
if region.blank? || bedrock_model_id.blank?
|
|
|
|
raise CompletionFailed.new(I18n.t("discourse_ai.llm_models.bedrock_invalid_url"))
|
|
|
|
end
|
|
|
|
|
2024-07-30 12:44:57 -04:00
|
|
|
api_url =
|
|
|
|
"https://bedrock-runtime.#{region}.amazonaws.com/model/#{bedrock_model_id}/invoke"
|
2023-11-23 10:58:54 -05:00
|
|
|
|
|
|
|
api_url = @streaming_mode ? (api_url + "-with-response-stream") : api_url
|
|
|
|
|
|
|
|
URI(api_url)
|
|
|
|
end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
def prepare_payload(prompt, model_params, dialect)
|
2024-06-07 09:52:01 -04:00
|
|
|
@native_tool_support = dialect.native_tool_support?
|
2024-12-05 15:45:58 -05:00
|
|
|
@dialect = dialect
|
2024-06-07 09:52:01 -04:00
|
|
|
|
2024-12-05 15:45:58 -05:00
|
|
|
payload = nil
|
2024-10-24 15:24:53 -04:00
|
|
|
|
2024-12-05 15:45:58 -05:00
|
|
|
if dialect.is_a?(DiscourseAi::Completions::Dialects::Claude)
|
|
|
|
payload = default_options(dialect).merge(model_params).merge(messages: prompt.messages)
|
|
|
|
payload[:system] = prompt.system_prompt if prompt.system_prompt.present?
|
|
|
|
|
|
|
|
if prompt.has_tools?
|
|
|
|
payload[:tools] = prompt.tools
|
|
|
|
if dialect.tool_choice.present?
|
|
|
|
payload[:tool_choice] = { type: "tool", name: dialect.tool_choice }
|
|
|
|
end
|
2024-10-24 15:24:53 -04:00
|
|
|
end
|
2024-12-05 15:45:58 -05:00
|
|
|
elsif dialect.is_a?(DiscourseAi::Completions::Dialects::Nova)
|
|
|
|
payload = prompt.to_payload(default_options(dialect).merge(model_params))
|
|
|
|
else
|
|
|
|
raise "Unsupported dialect"
|
2024-10-24 15:24:53 -04:00
|
|
|
end
|
2024-03-18 15:48:46 -04:00
|
|
|
payload
|
2023-11-23 10:58:54 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
def prepare_request(payload)
|
|
|
|
headers = { "content-type" => "application/json", "Accept" => "*/*" }
|
|
|
|
|
|
|
|
signer =
|
|
|
|
Aws::Sigv4::Signer.new(
|
2024-07-30 12:44:57 -04:00
|
|
|
access_key_id: llm_model.lookup_custom_param("access_key_id"),
|
|
|
|
region: llm_model.lookup_custom_param("region"),
|
|
|
|
secret_access_key: llm_model.api_key,
|
2023-11-23 10:58:54 -05:00
|
|
|
service: "bedrock",
|
|
|
|
)
|
|
|
|
|
|
|
|
Net::HTTP::Post
|
2023-11-23 13:49:24 -05:00
|
|
|
.new(model_uri)
|
2023-11-23 10:58:54 -05:00
|
|
|
.tap do |r|
|
|
|
|
r.body = payload
|
|
|
|
|
|
|
|
signed_request =
|
|
|
|
signer.sign_request(req: r, http_method: r.method, url: model_uri, body: r.body)
|
|
|
|
|
|
|
|
r.initialize_http_header(headers.merge(signed_request.headers))
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
def decode_chunk(partial_data)
|
|
|
|
bedrock_decode(partial_data)
|
|
|
|
.map do |decoded_partial_data|
|
|
|
|
@raw_response ||= +""
|
|
|
|
@raw_response << decoded_partial_data
|
|
|
|
@raw_response << "\n"
|
|
|
|
|
|
|
|
parsed_json = JSON.parse(decoded_partial_data, symbolize_names: true)
|
|
|
|
processor.process_streamed_message(parsed_json)
|
|
|
|
end
|
|
|
|
.compact
|
|
|
|
end
|
|
|
|
|
|
|
|
        # Non-streaming path: hand the complete response body to the
        # dialect-appropriate message processor.
        def decode(response_data)
          processor.process_message(response_data)
        end
|
|
|
|
|
|
|
|
        # Decodes a raw AWS event-stream chunk into an array of Base64-decoded
        # message payloads.
        #
        # The decoder is stateful — a network chunk may contain a partial
        # event or several whole events — so it is memoized across calls and
        # drained in a loop after each feed. Decode/parse errors are logged
        # and swallowed (returning []) rather than raised.
        def bedrock_decode(chunk)
          @decoder ||= Aws::EventStream::Decoder.new

          decoded, _done = @decoder.decode_chunk(chunk)

          messages = []
          return messages if !decoded

          i = 0
          while decoded
            parsed = JSON.parse(decoded.payload.string)
            # Bedrock reports server-side problems (e.g. throttling) as an
            # exception-type header on the event rather than an HTTP error.
            if exception = decoded.headers[":exception-type"]
              Rails.logger.error("#{self.class.name}: #{exception}: #{parsed}")
              # TODO based on how often this happens, we may want to raise so we
              # can retry, this may catch rate limits for example
            end
            # perhaps some control message we can just ignore
            messages << Base64.decode64(parsed["bytes"]) if parsed && parsed["bytes"]

            # Drain any further events already buffered inside the decoder
            # (decode_chunk with no argument continues from internal state).
            decoded, _done = @decoder.decode_chunk

            i += 1
            # Safety valve: a decoder that never runs dry would loop forever.
            if i > 10_000
              Rails.logger.error(
                "DiscourseAI: Stream decoder looped too many times, logic error needs fixing",
              )
              break
            end
          end

          messages
        rescue JSON::ParserError,
               Aws::EventStream::Errors::MessageChecksumError,
               Aws::EventStream::Errors::PreludeChecksumError => e
          Rails.logger.error("#{self.class.name}: #{e.message}")
          []
        end
|
|
|
|
|
2024-03-18 15:48:46 -04:00
|
|
|
def final_log_update(log)
|
2024-06-05 18:34:23 -04:00
|
|
|
log.request_tokens = processor.input_tokens if processor.input_tokens
|
|
|
|
log.response_tokens = processor.output_tokens if processor.output_tokens
|
2024-11-11 16:14:30 -05:00
|
|
|
log.raw_response_payload = @raw_response
|
2024-06-05 18:34:23 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
def processor
|
2024-12-05 15:45:58 -05:00
|
|
|
if dialect.is_a?(DiscourseAi::Completions::Dialects::Claude)
|
|
|
|
@processor ||=
|
|
|
|
DiscourseAi::Completions::AnthropicMessageProcessor.new(
|
|
|
|
streaming_mode: @streaming_mode,
|
|
|
|
)
|
|
|
|
else
|
|
|
|
@processor ||=
|
|
|
|
DiscourseAi::Completions::NovaMessageProcessor.new(streaming_mode: @streaming_mode)
|
|
|
|
end
|
2024-06-05 18:34:23 -04:00
|
|
|
end
|
|
|
|
|
2024-11-11 16:14:30 -05:00
|
|
|
        # Whether tool calls must be emulated through XML tags: true only when
        # the dialect seen in #prepare_payload lacked native tool support.
        def xml_tools_enabled?
          !@native_tool_support
        end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|