From ba3c3951cfa364d0709091f6e4fef7ad2a3ef65a Mon Sep 17 00:00:00 2001
From: Sam
Date: Mon, 5 Feb 2024 11:16:36 +1100
Subject: [PATCH] FIX: typo causing text_embedding_3_large to fail (#460)

---
 .../text_embedding_3_large.rb                |  2 +-
 .../text_embedding_3_large_spec.rb           | 22 +++++++++++++++++++
 .../text_embedding_3_small_spec.rb           | 15 +++++++++++++
 spec/support/embeddings_generation_stubs.rb  |  4 ++--
 4 files changed, 40 insertions(+), 3 deletions(-)
 create mode 100644 spec/lib/modules/embeddings/vector_representations/text_embedding_3_large_spec.rb
 create mode 100644 spec/lib/modules/embeddings/vector_representations/text_embedding_3_small_spec.rb

diff --git a/lib/embeddings/vector_representations/text_embedding_3_large.rb b/lib/embeddings/vector_representations/text_embedding_3_large.rb
index f7d478bf..65cae0ed 100644
--- a/lib/embeddings/vector_representations/text_embedding_3_large.rb
+++ b/lib/embeddings/vector_representations/text_embedding_3_large.rb
@@ -48,7 +48,7 @@ module DiscourseAi
           response =
             DiscourseAi::Inference::OpenAiEmbeddings.perform!(
               text,
-              model: self.clas.name,
+              model: self.class.name,
               dimensions: dimensions,
             )
           response[:data].first[:embedding]
diff --git a/spec/lib/modules/embeddings/vector_representations/text_embedding_3_large_spec.rb b/spec/lib/modules/embeddings/vector_representations/text_embedding_3_large_spec.rb
new file mode 100644
index 00000000..5bed2863
--- /dev/null
+++ b/spec/lib/modules/embeddings/vector_representations/text_embedding_3_large_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require_relative "vector_rep_shared_examples"
+
+RSpec.describe DiscourseAi::Embeddings::VectorRepresentations::TextEmbedding3Large do
+  subject(:vector_rep) { described_class.new(truncation) }
+
+  let(:truncation) { DiscourseAi::Embeddings::Strategies::Truncation.new }
+
+  def stub_vector_mapping(text, expected_embedding)
+    EmbeddingsGenerationStubs.openai_service(
+      described_class.name,
+      text,
+      expected_embedding,
+      extra_args: {
+        dimensions: 2000,
+      },
+    )
+  end
+
+  it_behaves_like "generates and store embedding using with vector representation"
+end
diff --git a/spec/lib/modules/embeddings/vector_representations/text_embedding_3_small_spec.rb b/spec/lib/modules/embeddings/vector_representations/text_embedding_3_small_spec.rb
new file mode 100644
index 00000000..1f4f01c2
--- /dev/null
+++ b/spec/lib/modules/embeddings/vector_representations/text_embedding_3_small_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require_relative "vector_rep_shared_examples"
+
+RSpec.describe DiscourseAi::Embeddings::VectorRepresentations::TextEmbedding3Small do
+  subject(:vector_rep) { described_class.new(truncation) }
+
+  let(:truncation) { DiscourseAi::Embeddings::Strategies::Truncation.new }
+
+  def stub_vector_mapping(text, expected_embedding)
+    EmbeddingsGenerationStubs.openai_service(described_class.name, text, expected_embedding)
+  end
+
+  it_behaves_like "generates and store embedding using with vector representation"
+end
diff --git a/spec/support/embeddings_generation_stubs.rb b/spec/support/embeddings_generation_stubs.rb
index 5d688b94..06ad6352 100644
--- a/spec/support/embeddings_generation_stubs.rb
+++ b/spec/support/embeddings_generation_stubs.rb
@@ -13,10 +13,10 @@ class EmbeddingsGenerationStubs
         .to_return(status: 200, body: JSON.dump(embedding))
     end
 
-    def openai_service(model, string, embedding)
+    def openai_service(model, string, embedding, extra_args: {})
       WebMock
         .stub_request(:post, "https://api.openai.com/v1/embeddings")
-        .with(body: JSON.dump({ model: model, input: string }))
+        .with(body: JSON.dump({ model: model, input: string }.merge(extra_args)))
         .to_return(status: 200, body: JSON.dump({ data: [{ embedding: embedding }] }))
     end
   end