FIX: gemini 0801 tool calls (#748)
The Gemini experimental model requires tool_config to be set explicitly; previously no tool_config was sent and the API default applied. This fixes prompts containing multiple tools on Gemini.
parent a7c0ddba97
commit cf1e15ef12
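For context, when tools are declared the Gemini endpoint now sends an explicit tool_config alongside the tools array. A rough sketch of the resulting payload shape (field values are illustrative and not taken from this diff; only the tool_config line is new):

    payload = {
      contents: [{ role: "user", parts: [{ text: "Hello" }] }],    # user prompt, unchanged
      tools: tools,                                                # function declarations, unchanged
      tool_config: { function_calling_config: { mode: "AUTO" } },  # added by this commit
      generationConfig: {},                                        # merged with model_params, unchanged
    }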
@@ -65,7 +65,10 @@ module DiscourseAi
           role: "system",
           parts: [{ text: prompt[:system_instruction].to_s }],
         } if prompt[:system_instruction].present?
-        payload[:tools] = tools if tools.present?
+        if tools.present?
+          payload[:tools] = tools
+          payload[:tool_config] = { function_calling_config: { mode: "AUTO" } }
+        end
         payload[:generationConfig].merge!(model_params) if model_params.present?
         payload
       end
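Note: AUTO mirrors the Gemini API's documented default function-calling mode; the API also accepts ANY (force a function call, optionally restricted with allowed_function_names) and NONE. Neither is used by this change, but forcing the echo tool would look roughly like:

    payload[:tool_config] = {
      function_calling_config: { mode: "ANY", allowed_function_names: ["echo"] },
    }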
@@ -145,6 +145,43 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Gemini do
     EndpointsCompliance.new(self, endpoint, DiscourseAi::Completions::Dialects::Gemini, user)
   end

+  let(:echo_tool) do
+    {
+      name: "echo",
+      description: "echo something",
+      parameters: [{ name: "text", type: "string", description: "text to echo", required: true }],
+    }
+  end
+
+  # by default gemini is meant to use AUTO mode, however new experimental models
+  # appear to require this to be explicitly set
+  it "Explicitly specifies tool config" do
+    prompt = DiscourseAi::Completions::Prompt.new("Hello", tools: [echo_tool])
+
+    response = gemini_mock.response("World").to_json
+
+    req_body = nil
+
+    llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}")
+    url = "#{model.url}:generateContent?key=123"
+
+    stub_request(:post, url).with(
+      body:
+        proc do |_req_body|
+          req_body = _req_body
+          true
+        end,
+    ).to_return(status: 200, body: response)
+
+    response = llm.generate(prompt, user: user)
+
+    expect(response).to eq("World")
+
+    parsed = JSON.parse(req_body, symbolize_names: true)
+
+    expect(parsed[:tool_config]).to eq({ function_calling_config: { mode: "AUTO" } })
+  end
+
   it "Supports Vision API" do
     prompt =
       DiscourseAi::Completions::Prompt.new(
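For reference, gemini_mock.response("World") in the spec above stands in for a Gemini generateContent response; the shape the endpoint reads back is roughly the following sketch (usage metadata omitted):

    {
      candidates: [
        { content: { role: "model", parts: [{ text: "World" }] }, finishReason: "STOP" },
      ],
    }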