# frozen_string_literal: true
module DiscourseAi
  module Completions
    module Endpoints
      # Endpoint adapter for a self-hosted Hugging Face inference server.
      # The wire format mirrors an OpenAI-style chat completion payload
      # (`messages`, `choices[0].delta/message.content`, `stream`), as seen
      # in prepare_payload/extract_completion_from below.
      class HuggingFace < Base
        class << self
          # True only for the "hugging_face" provider key.
          def can_contact?(endpoint_name)
            "hugging_face" == endpoint_name
          end

          # Site settings this endpoint depends on to be usable.
          def dependant_setting_names
            ["ai_hugging_face_api_url"]
          end

          # Considered configured as soon as the API URL setting is set;
          # the model name itself is not validated here.
          def correctly_configured?(_model_name)
            SiteSetting.ai_hugging_face_api_url.present?
          end

          # Human-readable label for this endpoint/model combination.
          def endpoint_name(model_name)
            "Hugging Face - #{model_name}"
          end
        end

        # Map generic parameter names onto what this API expects.
        # Works on a shallow copy — the caller's hash is left untouched.
        def normalize_model_params(model_params)
          normalized = model_params.dup

          # max_tokens and temperature already use the expected names.
          normalized[:stop] = normalized.delete(:stop_sequences) if normalized[:stop_sequences]

          normalized
        end

        # Baseline request options; callers may override via model_params.
        def default_options
          { temperature: 0.7, model: model }
        end

        # Provider identifier recorded in the API audit log.
        def provider_id
          AiApiAuditLog::Provider::HuggingFaceTextGeneration
        end

        private

        # Endpoint URL: per-model override wins over the site-wide setting.
        def model_uri
          configured_url = llm_model&.url || SiteSetting.ai_hugging_face_api_url
          URI(configured_url)
        end

        # Build the JSON-able request body from defaults, caller params and
        # the dialect-translated prompt (the dialect itself is unused here).
        def prepare_payload(prompt, model_params, _dialect)
          payload = default_options.merge(model_params).merge(messages: prompt)

          unless payload[:max_tokens]
            # Leave room for the completion: total budget minus prompt size.
            token_budget = llm_model&.max_prompt_tokens || SiteSetting.ai_hugging_face_token_limit
            payload[:max_tokens] = token_budget - prompt_size(prompt)
          end

          payload[:stream] = true if @streaming_mode

          payload
        end

        # Wrap the serialized payload in an authenticated POST request.
        # The Authorization header is only sent when an API key is configured.
        def prepare_request(payload)
          api_key = llm_model&.api_key || SiteSetting.ai_hugging_face_api_key

          headers = { "Content-Type" => "application/json" }
          headers["Authorization"] = "Bearer #{api_key}" if api_key.present?

          request = Net::HTTP::Post.new(model_uri, headers)
          request.body = payload
          request
        end

        # Pull the completion text out of one raw JSON response/chunk.
        # Returns nil when the chunk carries no choice (e.g. a partial line).
        def extract_completion_from(response_raw)
          choice = JSON.parse(response_raw, symbolize_names: true).dig(:choices, 0)

          # Nothing usable in this chunk yet.
          return unless choice

          # Streaming chunks put text under :delta, full responses under :message.
          container = @streaming_mode ? choice[:delta] : choice[:message]
          container[:content]
        end

        # Split a decoded SSE chunk into its "data:" payloads, dropping
        # lines without a data prefix and the terminal "[DONE]" sentinel.
        def partials_from(decoded_chunk)
          decoded_chunk.split("\n").filter_map do |raw_line|
            data = raw_line.split("data:", 2)[1]
            data if data&.squish != "[DONE]"
          end
        end
      end
    end
  end
end
|