FIX: Gemini inference client was missing #instance (#1019)
parent 700e9de073
commit 6da35d8e66
@@ -43,7 +43,7 @@ module DiscourseAi
         end

         def vector_from(text, asymetric: false)
-          inference_client.perform!(text).dig(:embedding, :values)
+          inference_client.perform!(text)
         end

         # There is no public tokenizer for Gemini, and from the ones we already ship in the plugin

@@ -3,6 +3,10 @@
 module ::DiscourseAi
   module Inference
     class GeminiEmbeddings
+      def self.instance
+        new(SiteSetting.ai_gemini_api_key)
+      end
+
       def initialize(api_key, referer = Discourse.base_url)
         @api_key = api_key
         @referer = referer

@@ -21,7 +25,7 @@ module ::DiscourseAi
         case response.status
         when 200
-          JSON.parse(response.body, symbolize_names: true)
+          JSON.parse(response.body, symbolize_names: true).dig(:embedding, :values)
         when 429
           # TODO add a AdminDashboard Problem?
         else

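Taken together, the hunks above add a class-level factory to the Gemini inference client and move the response digging into perform!, so callers get the embedding values back directly. A minimal sketch of the resulting call path, assuming the vector representation's inference_client resolves to GeminiEmbeddings.instance (that wiring is not shown in this diff):

    # Sketch only: class and method names come from the hunks above,
    # but this exact call site does not appear in the commit.
    client = DiscourseAi::Inference::GeminiEmbeddings.instance  # built from SiteSetting.ai_gemini_api_key
    values = client.perform!("some text to embed")              # => e.g. [0.0123, -0.0456, ...]
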
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require_relative "vector_rep_shared_examples"
+
+RSpec.describe DiscourseAi::Embeddings::VectorRepresentations::Gemini do
+  subject(:vector_rep) { described_class.new(truncation) }
+
+  let(:truncation) { DiscourseAi::Embeddings::Strategies::Truncation.new }
+  let!(:api_key) { "test-123" }
+
+  before { SiteSetting.ai_gemini_api_key = api_key }
+
+  def stub_vector_mapping(text, expected_embedding)
+    EmbeddingsGenerationStubs.gemini_service(api_key, text, expected_embedding)
+  end
+
+  it_behaves_like "generates and store embedding using with vector representation"
+end

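The real coverage comes from vector_rep_shared_examples.rb, which is not part of this commit. A hedged, standalone expectation consistent with the setup and stubs above would look roughly like this (hypothetical spec body, not in the diff):

    it "returns the stubbed embedding from vector_from" do
      stub_vector_mapping("hello world", [0.1, 0.2, 0.3])
      expect(vector_rep.vector_from("hello world")).to eq([0.1, 0.2, 0.3])
    end
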
@@ -19,5 +19,15 @@ class EmbeddingsGenerationStubs
         .with(body: JSON.dump({ model: model, input: string }.merge(extra_args)))
         .to_return(status: 200, body: JSON.dump({ data: [{ embedding: embedding }] }))
     end
+
+    def gemini_service(api_key, string, embedding)
+      WebMock
+        .stub_request(
+          :post,
+          "https://generativelanguage.googleapis.com/v1beta/models/embedding-001:embedContent\?key\=#{api_key}",
+        )
+        .with(body: JSON.dump({ content: { parts: [{ text: string }] } }))
+        .to_return(status: 200, body: JSON.dump({ embedding: { values: embedding } }))
+    end
   end
 end
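
The new stub helper is driven from specs like the one added above; a small usage sketch (the values are illustrative only):

    # Illustrative only: stubs the Gemini embedContent endpoint for "hello world".
    EmbeddingsGenerationStubs.gemini_service("test-123", "hello world", [0.1, 0.2, 0.3])
    # A POST to .../models/embedding-001:embedContent?key=test-123 with that text
    # now returns { "embedding" => { "values" => [0.1, 0.2, 0.3] } }, which
    # perform! digs into via (:embedding, :values).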