Hoa Nguyen 2063b3854f
FEATURE: Add Ollama provider (#812)
This allows users to add Ollama as a provider and use it to serve our AI bot (completion/dialect).

In this PR, we introduce:

    DiscourseAi::Completions::Dialects::Ollama, which translates prompts from our generic format into the format expected by Completions::Endpoints::Ollama
    Corrected extract_completion_from and partials_from in Endpoints::Ollama

Also:

    Add tests for Endpoints::Ollama
    Introduce ollama_model fabricator
2024-10-01 10:45:03 +10:00
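
For context, here is a minimal sketch of the translation the new dialect performs: mapping a prompt in our generic format into the messages array Ollama's chat API expects. The GenericPrompt struct and translate_to_ollama method below are illustrative stand-ins, not the plugin's actual API.

# frozen_string_literal: true

# Hypothetical stand-in for a prompt in our generic format.
GenericPrompt = Struct.new(:system_insts, :user_input, keyword_init: true)

# Map the generic prompt into Ollama's chat message format:
# an array of { role:, content: } hashes, system message first.
def translate_to_ollama(prompt)
  [
    { role: "system", content: prompt.system_insts },
    { role: "user", content: prompt.user_input },
  ]
end

prompt = GenericPrompt.new(system_insts: "You are a helpful bot.", user_input: "Hello!")
translate_to_ollama(prompt)
# => [{ role: "system", content: "You are a helpful bot." },
#     { role: "user", content: "Hello!" }]

This mirrors the system/user message shape asserted in the spec below.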


# frozen_string_literal: true

require_relative "dialect_context"

RSpec.describe DiscourseAi::Completions::Dialects::Ollama do
  fab!(:model) { Fabricate(:ollama_model) }
  let(:context) { DialectContext.new(described_class, model) }

  describe "#translate" do
    it "translates a prompt written in our generic format to the Ollama format" do
      ollama_version = [
        { role: "system", content: context.system_insts },
        { role: "user", content: context.simple_user_input },
      ]

      translated = context.system_user_scenario

      expect(translated).to eq(ollama_version)
    end

    it "trims content if it's getting too long" do
      model.max_prompt_tokens = 5000

      translated = context.long_user_input_scenario

      expect(translated.last[:role]).to eq("user")
      expect(translated.last[:content].length).to be < context.long_message_text.length
    end
  end

  describe "#max_prompt_tokens" do
    it "returns the max_prompt_tokens from the llm_model" do
      model.max_prompt_tokens = 10_000
      expect(context.dialect(nil).max_prompt_tokens).to eq(10_000)
    end
  end
end
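
The trimming test above exercises the budget set by max_prompt_tokens. As a rough sketch of that idea (assuming a simple characters-per-token heuristic; the plugin's real logic uses a tokenizer and differs in detail), trimming might look like:

# A hedged sketch, not the plugin's implementation: shrink message
# contents so the whole prompt fits a max_prompt_tokens budget.
CHARS_PER_TOKEN = 4 # simplifying assumption; real code counts tokens

def trim_messages(messages, max_prompt_tokens)
  budget = max_prompt_tokens * CHARS_PER_TOKEN
  messages.map do |message|
    keep = [budget, message[:content].length].min
    budget -= keep
    { role: message[:role], content: message[:content][0, keep] }
  end
end

messages = [{ role: "user", content: "x" * 100_000 }]
trim_messages(messages, 5_000).first[:content].length # => 20_000, under budget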