# frozen_string_literal: true

module DiscourseAi
  module Embeddings
    module VectorRepresentations
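      # Vector representation backed by OpenAI's "text-embedding-3-large"
      # embedding model.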
      class TextEmbedding3Large < Base
        class << self
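          # Model name passed as the model: parameter to the OpenAI embeddings API.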
          def name
            "text-embedding-3-large"
          end

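          # Considered configured once an OpenAI API key has been set.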
          def correctly_configured?
            SiteSetting.ai_openai_api_key.present?
          end

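          # Site settings this representation depends on.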
          def dependant_setting_names
            %w[ai_openai_api_key]
          end
        end

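        # Internal identifier and schema version for embeddings stored by this
        # representation.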
        def id
          7
        end

        def version
          1
        end

        def dimensions
          # real dimensions are 3072, but we only support up to 2000 in the
          # indexes, so we downsample to 2000 via API
          2000
        end

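        # Maximum input length, in tokens, accepted by the embeddings API.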
        def max_sequence_length
          8191
        end

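        # pgvector operator used for similarity queries; "<=>" is cosine distance.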
        def pg_function
          "<=>"
        end

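        # pgvector operator class for the index; halfvec_cosine_ops indexes
        # half-precision vectors by cosine distance.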
        def pg_index_type
          "halfvec_cosine_ops"
        end

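        # Requests an embedding from the OpenAI API, downsampled to
        # `dimensions`, and returns the embedding array for the input text.
        # The asymetric keyword is accepted for interface compatibility but is
        # not used by this model.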
        def vector_from(text, asymetric: false)
          response =
            DiscourseAi::Inference::OpenAiEmbeddings.perform!(
              text,
              model: self.class.name,
              dimensions: dimensions,
            )
          response[:data].first[:embedding]
        end

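        # Tokenizer matching OpenAI's embedding models.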
        def tokenizer
          DiscourseAi::Tokenizer::OpenAiTokenizer
        end
      end
    end
  end
end
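
# Rough usage sketch (hypothetical call site; the `strategy` argument and the
# constructor signature depend on how the Base class is defined in this plugin):
#
#   representation =
#     DiscourseAi::Embeddings::VectorRepresentations::TextEmbedding3Large.new(strategy)
#   embedding = representation.vector_from("text to embed")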