From 8c8fd969ef735bfb49b82652aacafbf5b9e184a5 Mon Sep 17 00:00:00 2001
From: Roman Rizzi
Date: Wed, 11 Jun 2025 20:38:58 -0300
Subject: [PATCH] FIX: Don't check for #blank? when manipulating chunks (#1428)

---
 lib/completions/json_streaming_tracker.rb      | 2 +-
 spec/lib/completions/endpoints/gemini_spec.rb  | 4 +++-
 spec/lib/completions/structured_output_spec.rb | 8 ++++++++
 3 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/lib/completions/json_streaming_tracker.rb b/lib/completions/json_streaming_tracker.rb
index 5e1630ef..26771e9f 100644
--- a/lib/completions/json_streaming_tracker.rb
+++ b/lib/completions/json_streaming_tracker.rb
@@ -74,7 +74,7 @@ module DiscourseAi
     private
 
     def try_escape_and_parse(raw_json)
-      if raw_json.blank? || !raw_json.is_a?(String)
+      if !raw_json.is_a?(String)
         @broken = true
         return
       end
diff --git a/spec/lib/completions/endpoints/gemini_spec.rb b/spec/lib/completions/endpoints/gemini_spec.rb
index 59d76cac..1421e0f7 100644
--- a/spec/lib/completions/endpoints/gemini_spec.rb
+++ b/spec/lib/completions/endpoints/gemini_spec.rb
@@ -544,7 +544,9 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Gemini do
       data: {"candidates": [{"content": {"parts": [{"text": "Hello!"}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
 
-      data: {"candidates": [{"content": {"parts": [{"text": "\\n there"}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
+      data: {"candidates": [{"content": {"parts": [{"text": "\\n "}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"content": {"parts": [{"text": "there"}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
 
       data: {"candidates": [{"content": {"parts": [{"text": "\\","}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
diff --git a/spec/lib/completions/structured_output_spec.rb b/spec/lib/completions/structured_output_spec.rb
index 8483b691..c4c5d882 100644
--- a/spec/lib/completions/structured_output_spec.rb
+++ b/spec/lib/completions/structured_output_spec.rb
@@ -112,6 +112,14 @@ RSpec.describe DiscourseAi::Completions::StructuredOutput do
         ["Hello! I am a chunk", "There"],
       )
     end
+
+    it "handles empty newline chunks" do
+      chunks = [+"{\"", +"message", +"\":\"", +"Hello!", +"\n", +"\"", +"}"]
+
+      chunks.each { |c| structured_output << c }
+
+      expect(structured_output.read_buffered_property(:message)).to eq("Hello!\n")
+    end
   end
 
   describe "dealing with non-JSON responses" do
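
Context for the change: under ActiveSupport (which Discourse loads), String#blank? is true for whitespace-only strings, so a streamed chunk containing only a newline was treated as broken JSON even though it is a valid fragment of a string value. The snippet below is a standalone illustration of that distinction, not code from the repository; it assumes only ActiveSupport's blank? core extension.

    # Minimal sketch: why the blank? guard rejected valid whitespace chunks.
    require "active_support/core_ext/object/blank"

    chunk = "\n"
    chunk.blank?        # => true  -- the removed guard would have marked the stream broken
    chunk.is_a?(String) # => true  -- the remaining guard still accepts this valid fragment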