# frozen_string_literal: true
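# OpenAiCompletionsInferenceStubs provides the canned completion responses and
# request stubs used by the examples below.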
require_relative "../../support/openai_completions_inference_stubs"

RSpec.describe DiscourseAi::AiHelper::AssistantController do
describe "#suggest" do
|
|
|
|
let(:text) { OpenAiCompletionsInferenceStubs.translated_response }
|
2023-03-17 14:14:19 -04:00
|
|
|
let(:mode) { "proofread" }
|
2023-03-15 16:02:20 -04:00
|
|
|
|
|
|
|
context "when not logged in" do
it "returns a 403 response" do
|
|
|
|
post "/discourse-ai/ai-helper/suggest", params: { text: text, mode: mode }
|
|
|
|
|
|
|
|
expect(response.status).to eq(403)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "when logged in as an user without enough privileges" do
      fab!(:user) { Fabricate(:newuser) }

      before do
        sign_in(user)
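        # Only staff may use the helper here, so the brand-new user signed in above
        # is expected to be rejected.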
        SiteSetting.ai_helper_allowed_groups = Group::AUTO_GROUPS[:staff]
      end

      it "returns a 403 response" do
post "/discourse-ai/ai-helper/suggest", params: { text: text, mode: mode }
|
|
|
|
|
|
|
|
expect(response.status).to eq(403)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "when logged in as an allowed user" do
|
|
|
|
fab!(:user) { Fabricate(:user) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
sign_in(user)
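        # Access is gated on ai_helper_allowed_groups, so put the user in TL1 and
        # allow that same group.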
        user.group_ids = [Group::AUTO_GROUPS[:trust_level_1]]
        SiteSetting.ai_helper_allowed_groups = Group::AUTO_GROUPS[:trust_level_1]
      end

      it "returns a 400 if the helper mode is invalid" do
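        # No completion request is stubbed for this example; an unrecognised mode is
        # expected to be rejected before any inference is attempted.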
        invalid_mode = "asd"

        post "/discourse-ai/ai-helper/suggest", params: { text: text, mode: invalid_mode }

        expect(response.status).to eq(400)
      end

      it "returns a 400 if the text is blank" do
post "/discourse-ai/ai-helper/suggest", params: { mode: mode }
|
|
|
|
|
|
|
|
expect(response.status).to eq(400)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "returns a suggestion" do
|
|
|
|
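        # Stub the OpenAI completion request for the proofread prompt; the canned
        # proofread_response is what should come back as the suggestion.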
        OpenAiCompletionsInferenceStubs.stub_prompt(mode)

        post "/discourse-ai/ai-helper/suggest", params: { mode: mode, text: text }

        expect(response.status).to eq(200)
        expect(response.parsed_body["suggestions"].first).to eq(
          OpenAiCompletionsInferenceStubs.proofread_response.strip,
        )
      end
    end
  end
end