discourse-ai/spec/shared/inference/openai_completions_spec.rb
Sam d59ed1091b
FEATURE: add support for GPT <-> Forum integration
This change-set connects GPT-based chat with the forum it runs on, allowing it to perform searches, look up tags and categories, and summarize topics.

The integration is currently restricted to public portions of the forum. 

Changes made:

- Do not run ai reply job for small actions
- Improved composable system prompt
- Trivial summarizer for topics
- Image generator 
- Google command for searching via Google
- Corrected trimming of posts raw (was replacing with numbers) 
- Bypass of problem specs

The feature works best with GPT-4


---------

Co-authored-by: Roman Rizzi <rizziromanalejandro@gmail.com>
2023-05-20 17:45:54 +10:00

83 lines
2.5 KiB
Ruby

# frozen_string_literal: true
require "rails_helper"
require_relative "../../support/openai_completions_inference_stubs"
describe DiscourseAi::Inference::OpenAiCompletions do
  before { SiteSetting.ai_openai_api_key = "abc-123" }

  it "can complete a trivial prompt" do
    response_text = "1. Serenity\\n2. Laughter\\n3. Adventure"
    prompt = [role: "user", content: "write 3 words"]
    user_id = 183
    req_opts = { temperature: 0.5, top_p: 0.8, max_tokens: 700 }

    OpenAiCompletionsInferenceStubs.stub_response(prompt, response_text, req_opts: req_opts)

    completions =
      DiscourseAi::Inference::OpenAiCompletions.perform!(
        prompt,
        "gpt-3.5-turbo",
        temperature: 0.5,
        top_p: 0.8,
        max_tokens: 700,
        user_id: user_id,
      )

    expect(completions.dig(:choices, 0, :message, :content)).to eq(response_text)

    # Every completion call must be audit logged with the exact raw payloads.
    expect(AiApiAuditLog.count).to eq(1)
    log = AiApiAuditLog.first

    # NOTE: the request payload is the prompt plus the request options; the
    # response payload is whatever the stub returned. (Previously these two
    # locals were misleadingly named `body` and `request_body`.)
    request_body = { model: "gpt-3.5-turbo", messages: prompt }.merge(req_opts).to_json
    response_body = OpenAiCompletionsInferenceStubs.response(response_text).to_json

    expect(log.provider_id).to eq(AiApiAuditLog::Provider::OpenAI)
    expect(log.request_tokens).to eq(337)
    expect(log.response_tokens).to eq(162)
    expect(log.raw_request_payload).to eq(request_body)
    expect(log.raw_response_payload).to eq(response_body)
  end

  it "can operate in streaming mode" do
    deltas = [
      { role: "assistant" },
      { content: "Mount" },
      { content: "ain" },
      { content: " " },
      { content: "Tree " },
      { content: "Frog" },
    ]

    prompt = [role: "user", content: "write 3 words"]
    content = +""

    OpenAiCompletionsInferenceStubs.stub_streamed_response(
      prompt,
      deltas,
      req_opts: {
        stream: true,
      },
    )

    DiscourseAi::Inference::OpenAiCompletions.perform!(prompt, "gpt-3.5-turbo") do |partial, cancel|
      data = partial.dig(:choices, 0, :delta, :content)
      content << data if data
      # Cancel mid-stream once two words have arrived; "Frog" is never appended.
      cancel.call if content.split(" ").length == 2
    end

    expect(content).to eq("Mountain Tree ")

    # Cancelled streams are still audit logged with whatever was received.
    expect(AiApiAuditLog.count).to eq(1)
    log = AiApiAuditLog.first

    request_body = { model: "gpt-3.5-turbo", messages: prompt, stream: true }.to_json

    expect(log.provider_id).to eq(AiApiAuditLog::Provider::OpenAI)
    expect(log.request_tokens).to eq(4)
    expect(log.response_tokens).to eq(3)
    expect(log.raw_request_payload).to eq(request_body)
    expect(log.raw_response_payload).to be_present
  end
end