From 20612fde52d3f740cad64823ef8aadb0748b567f Mon Sep 17 00:00:00 2001
From: Sam
Date: Mon, 13 Jan 2025 17:01:01 +1100
Subject: [PATCH] FEATURE: add the ability to disable streaming on an Open AI
 LLM

Disabling streaming is required for models such as o1 that do not have
streaming enabled yet.

It is good to carry this feature around in case various APIs decide not to
support streaming endpoints, so Discourse AI can continue to work just as it
did before.

Also: fixes an issue where sharing artifacts would miss the viewport, leading
to tiny artifacts on mobile.
---
 .../ai_bot/artifacts_controller.rb              |  1 +
 app/models/llm_model.rb                         |  3 ++
 config/locales/client.en.yml                    |  1 +
 lib/completions/endpoints/base.rb               | 25 +++++++++++++
 lib/completions/endpoints/open_ai.rb            |  4 ++
 .../lib/completions/endpoints/open_ai_spec.rb   | 37 +++++++++++++++++++
 6 files changed, 71 insertions(+)

diff --git a/app/controllers/discourse_ai/ai_bot/artifacts_controller.rb b/app/controllers/discourse_ai/ai_bot/artifacts_controller.rb
index ff7f3260..aaec3b26 100644
--- a/app/controllers/discourse_ai/ai_bot/artifacts_controller.rb
+++ b/app/controllers/discourse_ai/ai_bot/artifacts_controller.rb
@@ -57,6 +57,7 @@ module DiscourseAi
           #{ERB::Util.html_escape(name)}
+
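The rest of the diff is truncated above. The following is a minimal sketch, assuming the patch adds a boolean disable_streaming flag to the LLM model and a non-streaming fallback in the completion endpoint; class, method, and attribute names here are illustrative and are not the exact ones used in lib/completions/endpoints/base.rb or open_ai.rb.

    # frozen_string_literal: true

    # Illustrative stand-in for the model record; the real patch stores the
    # flag on LlmModel (assumed name: disable_streaming).
    SketchLlmModel = Struct.new(:name, :disable_streaming, keyword_init: true)

    class SketchOpenAiEndpoint
      def initialize(model)
        @model = model
      end

      # When a block is given, callers expect streamed partials. If the model
      # has streaming disabled (e.g. o1), perform one blocking request instead
      # and hand the full response to the block in a single call, so existing
      # callers keep working unchanged.
      def complete(prompt, &partial)
        if partial && !@model.disable_streaming
          stream_request(prompt, &partial)
        else
          response = blocking_request(prompt)
          partial&.call(response)
          response
        end
      end

      private

      def blocking_request(prompt)
        # Stand-in for a POST without "stream": true in the request body.
        "full response for #{prompt}"
      end

      def stream_request(prompt)
        # Stand-in for an SSE request with "stream": true, yielding each delta.
        chunks = ["partial ", "response ", "for #{prompt}"]
        chunks.each { |chunk| yield chunk }
        chunks.join
      end
    end

    # Usage: an o1-style model falls back to a single blocking call, while the
    # caller's streaming block still receives the text.
    o1 = SketchLlmModel.new(name: "o1", disable_streaming: true)
    SketchOpenAiEndpoint.new(o1).complete("hello") { |text| puts text }

Keeping the fallback in the shared completion path means callers never need to know whether a given model streams, which is why the feature remains useful if other APIs later drop streaming support.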