diff --git a/lib/completions/endpoints/gemini.rb b/lib/completions/endpoints/gemini.rb
index b1e2ce9a..92ac29ac 100644
--- a/lib/completions/endpoints/gemini.rb
+++ b/lib/completions/endpoints/gemini.rb
@@ -65,7 +65,10 @@ module DiscourseAi
             role: "system",
             parts: [{ text: prompt[:system_instruction].to_s }],
           } if prompt[:system_instruction].present?
-          payload[:tools] = tools if tools.present?
+          if tools.present?
+            payload[:tools] = tools
+            payload[:tool_config] = { function_calling_config: { mode: "AUTO" } }
+          end
           payload[:generationConfig].merge!(model_params) if model_params.present?
           payload
         end
diff --git a/spec/lib/completions/endpoints/gemini_spec.rb b/spec/lib/completions/endpoints/gemini_spec.rb
index 55c7022b..b02d5bb5 100644
--- a/spec/lib/completions/endpoints/gemini_spec.rb
+++ b/spec/lib/completions/endpoints/gemini_spec.rb
@@ -145,6 +145,43 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Gemini do
     EndpointsCompliance.new(self, endpoint, DiscourseAi::Completions::Dialects::Gemini, user)
   end
 
+  let(:echo_tool) do
+    {
+      name: "echo",
+      description: "echo something",
+      parameters: [{ name: "text", type: "string", description: "text to echo", required: true }],
+    }
+  end
+
+  # by default gemini is meant to use AUTO mode, however new experimental models
+  # appear to require this to be explicitly set
+  it "Explicitly specifies tool config" do
+    prompt = DiscourseAi::Completions::Prompt.new("Hello", tools: [echo_tool])
+
+    response = gemini_mock.response("World").to_json
+
+    req_body = nil
+
+    llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}")
+    url = "#{model.url}:generateContent?key=123"
+
+    stub_request(:post, url).with(
+      body:
+        proc do |_req_body|
+          req_body = _req_body
+          true
+        end,
+    ).to_return(status: 200, body: response)
+
+    response = llm.generate(prompt, user: user)
+
+    expect(response).to eq("World")
+
+    parsed = JSON.parse(req_body, symbolize_names: true)
+
+    expect(parsed[:tool_config]).to eq({ function_calling_config: { mode: "AUTO" } })
+  end
+
   it "Supports Vision API" do
     prompt = DiscourseAi::Completions::Prompt.new(