FIX: Don't dig on nil when checking for the gemini schema (#1356)

Roman Rizzi 2025-05-21 08:30:47 -03:00 committed by GitHub
parent 53905f65ac
commit e207eba1a4
2 changed files with 25 additions and 1 deletion


@@ -88,7 +88,7 @@ module DiscourseAi
       payload[:generationConfig].merge!(model_params.except(:response_format))
       # https://ai.google.dev/api/generate-content#generationconfig
-      gemini_schema = model_params[:response_format].dig(:json_schema, :schema)
+      gemini_schema = model_params.dig(:response_format, :json_schema, :schema)
       if gemini_schema.present?
         payload[:generationConfig][:responseSchema] = gemini_schema.except(

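The one-line change swaps a chained lookup for a single Hash#dig over the full key path. When a caller passes model params without a :response_format key, the old code called #dig on nil and raised; Hash#dig stops safely at the first missing key and returns nil. A minimal plain-Ruby sketch of the difference (the `model_params` hash here is a stand-in, not plugin code):

```ruby
# Stand-in for the params the endpoint receives: a temperature but
# no :response_format key, which is the case that used to crash.
model_params = { temperature: 0.2 }

# New form: Hash#dig walks the whole key path and returns nil as soon
# as a key is missing.
model_params.dig(:response_format, :json_schema, :schema)
# => nil

# Old form: the bracket lookup returns nil first, and NilClass has no #dig.
model_params[:response_format].dig(:json_schema, :schema)
# => NoMethodError: undefined method `dig' for nil
```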

@@ -576,4 +576,28 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Gemini do
       expect(parsed.dig(:generationConfig, :responseMimeType)).to eq("application/json")
     end
   end
+
+  it "includes model params in the request" do
+    response = <<~TEXT
+      data: {"candidates": [{"content": {"parts": [{"text": "Hello"}],"role": "model"}}],"usageMetadata": {"promptTokenCount": 399,"totalTokenCount": 399},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"content": {"parts": [{"text": "! This is a simple response"}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 399,"totalTokenCount": 399},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
+    TEXT
+
+    llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}")
+    url = "#{model.url}:streamGenerateContent?alt=sse&key=123"
+
+    output = []
+
+    stub_request(:post, url).with(
+      body: hash_including(generationConfig: { temperature: 0.2 }),
+    ).to_return(status: 200, body: response)
+
+    llm.generate("Hello", user: user, temperature: 0.2) { |partial| output << partial }
+
+    expect(output).to eq(["Hello", "! This is a simple response"])
+  end
 end
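The new spec drives exactly the path that used to crash: temperature is forwarded as a model param with no response_format, so the payload builder must tolerate the absent key while still matching the stubbed request body. A condensed sketch of the fixed logic, using a hypothetical helper name rather than the endpoint's actual method:

```ruby
# Hypothetical condensation of the fixed payload construction; the real
# code lives in the plugin's Gemini endpoint class and uses .present?.
def generation_config(model_params)
  config = model_params.except(:response_format)
  # Nil-safe: returns nil if any key along the path is missing.
  schema = model_params.dig(:response_format, :json_schema, :schema)
  config[:responseSchema] = schema if schema
  config
end

generation_config(temperature: 0.2)
# => {temperature: 0.2}  (no crash when :response_format is absent)

generation_config(
  temperature: 0.2,
  response_format: { json_schema: { schema: { type: "object" } } },
)
# => {temperature: 0.2, responseSchema: {type: "object"}}
```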