2023-12-15 12:32:01 -05:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
|
|
|
module DiscourseAi
|
|
|
|
module Completions
|
|
|
|
module Endpoints
|
|
|
|
class Gemini < Base
|
2024-01-29 14:04:25 -05:00
|
|
|
class << self
  # A Gemini model is reachable only through the "google" endpoint and
  # only for the explicitly supported model names.
  def can_contact?(endpoint_name, model_name)
    endpoint_name == "google" && %w[gemini-pro gemini-1.5-pro].include?(model_name)
  end

  # Site settings that must be filled in before this endpoint works.
  def dependant_setting_names
    %w[ai_gemini_api_key]
  end

  # Configured as soon as an API key exists; the model name is ignored
  # because every Gemini model shares the same key.
  def correctly_configured?(_model_name)
    SiteSetting.ai_gemini_api_key.present?
  end

  # Human-readable label for admin UI, e.g. "Google - gemini-pro".
  def endpoint_name(model_name)
    "Google - #{model_name}"
  end
end
|
|
|
|
|
|
|
|
# Base request body for the Gemini API.
#
# NOTE: intentionally builds a fresh hash on every call — prepare_payload
# mutates the nested :generationConfig in place, so returning a shared or
# frozen structure would leak state between requests.
def default_options
  { generationConfig: {} }
end
|
|
|
|
|
|
|
|
# Translate generic snake_case sampling params into the camelCase keys
# the Gemini API expects. The caller's hash is never mutated.
#
# Renames performed (only when the key holds a truthy value):
#   stop_sequences -> stopSequences
#   max_tokens     -> maxOutputTokens
#   top_p          -> topP
# :temperature passes through untouched — Gemini accepts it as-is.
def normalize_model_params(model_params)
  normalized = model_params.dup

  {
    stop_sequences: :stopSequences,
    max_tokens: :maxOutputTokens,
    top_p: :topP,
  }.each do |generic_key, gemini_key|
    normalized[gemini_key] = normalized.delete(generic_key) if normalized[generic_key]
  end

  normalized
end
|
|
|
|
|
|
|
|
# Provider identifier recorded on AiApiAuditLog rows for requests
# issued through this endpoint.
def provider_id
  AiApiAuditLog::Provider::Gemini
end
|
|
|
|
|
|
|
|
private

# Build the request URI for the configured model.
#
# "gemini-1.5-pro" is aliased to Google's rolling "gemini-1.5-pro-latest"
# id. Streaming requests target the streamGenerateContent action,
# one-shot requests target generateContent; the API key is sent as a
# query parameter.
def model_uri
  mapped_model = model
  mapped_model = "gemini-1.5-pro-latest" if mapped_model == "gemini-1.5-pro"

  action = @streaming_mode ? "streamGenerateContent" : "generateContent"

  URI(
    "https://generativelanguage.googleapis.com/v1beta/models/#{mapped_model}:#{action}?key=#{SiteSetting.ai_gemini_api_key}",
  )
end
|
|
|
|
|
2023-12-18 16:06:01 -05:00
|
|
|
# Assemble the request body: base options plus the conversation contents,
# optional native tool declarations from the dialect, and any sampling
# overrides (merged into generationConfig, which default_options supplies
# as a fresh mutable hash).
def prepare_payload(prompt, model_params, dialect)
  payload = default_options.merge(contents: prompt)

  dialect_tools = dialect.tools
  payload[:tools] = dialect_tools if dialect_tools.present?
  payload[:generationConfig].merge!(model_params) if model_params.present?

  payload
end
|
|
|
|
|
|
|
|
# Wrap the already-serialized JSON payload in a POST request with the
# appropriate content type; the URL (including API key) comes from
# model_uri.
def prepare_request(payload)
  request = Net::HTTP::Post.new(model_uri, "Content-Type" => "application/json")
  request.body = payload
  request
end
|
|
|
|
|
|
|
|
# Pull the completion out of a Gemini response.
#
# In streaming mode response_raw is already a decoded hash (see
# partials_from); otherwise it is the raw JSON body. Once a functionCall
# part has been observed, this call — and every subsequent one — returns
# the functionCall hash; otherwise the text part is returned.
def extract_completion_from(response_raw)
  parsed = @streaming_mode ? response_raw : JSON.parse(response_raw, symbolize_names: true)

  part = parsed.dig(:candidates, 0, :content, :parts, 0)

  # Sticky flag: a tool invocation switches all later chunks to tool mode.
  @has_function_call ||= part[:functionCall].present?
  @has_function_call ? part[:functionCall] : part[:text]
end
|
|
|
|
|
|
|
|
# Decode one streamed chunk. Gemini streams a JSON array of response
# objects; a chunk that does not parse (e.g. an incomplete fragment)
# simply yields no partials.
def partials_from(decoded_chunk)
  JSON.parse(decoded_chunk, symbolize_names: true)
rescue JSON::ParserError
  []
end
|
|
|
|
|
|
|
|
# The tokenizer only needs a flat string rendering of the prompt.
def extract_prompt_for_tokenizer(prompt)
  "#{prompt}"
end
|
|
|
|
|
2024-01-02 09:21:13 -05:00
|
|
|
# True once extract_completion_from has observed a functionCall part.
# The response data argument is ignored — the sticky instance flag is
# the source of truth. (Deliberately returns the raw flag, not a
# coerced boolean, to preserve existing semantics.)
def has_tool?(_response_data)
  @has_function_call
end
|
|
|
|
|
2024-04-18 16:54:54 -04:00
|
|
|
# Gemini supports tool/function calling natively, so the generic
# XML-based tool fallback is unnecessary for this endpoint.
def native_tool_support?
  true
end
|
|
|
|
|
|
|
|
# Fold a Gemini functionCall into the XML function buffer.
#
# Streaming mode consumes +partial+ (a decoded chunk) and is a no-op
# until one arrives; non-streaming mode reads the complete +payload+.
# The call name is written into <tool_name> and each argument becomes a
# "<arg_name>value</arg_name>" child of <parameters>.
def add_to_function_buffer(function_buffer, payload: nil, partial: nil)
  if @streaming_mode
    # No decoded partial yet for this chunk — leave the buffer untouched.
    return function_buffer unless partial
  else
    partial = payload
  end

  call_name = partial[:name]
  function_buffer.at("tool_name").content = call_name if call_name.present?

  args = partial[:args]
  if args
    xml = args.map { |arg_name, value| "\n<#{arg_name}>#{value}</#{arg_name}>" }.join
    xml << "\n"

    function_buffer.at("parameters").children =
      Nokogiri::HTML5::DocumentFragment.parse(xml)
  end

  function_buffer
end
|
2023-12-15 12:32:01 -05:00
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|