Mirror of https://github.com/discourse/discourse-ai.git (synced 2025-10-31 14:38:37 +00:00)
* DEV: Add icon support
* DEV: Add basic setup for custom prompt menu
* FEATURE: custom prompt backend
* fix custom prompt param check
* fix custom prompt replace
* WIP
* fix custom prompt usage
* fixes
* DEV: Update front-end
* DEV: No more custom prompt state
* DEV: Add specs
* FIX: Title/Category/Tag suggestions. The suggestion dropdowns broke because `messages_with_user_input(user_input)` now expects a hash.
* DEV: Apply syntax tree
* DEV: Restrict custom prompts to configured groups
* oops
* fix tests
* lint
* I love tests
* lint is cool tho

Co-authored-by: Rafael dos Santos Silva <xfalcox@gmail.com>
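The title/category/tag fix above boils down to an input-shape change: the prompt helper now takes a hash rather than a bare string, which is why every call in the spec below wraps the input as `{ text: ... }`. A minimal sketch of the call-site change (the surrounding variable names are assumptions for illustration, not the actual discourse-ai code):

    # Before: user input handed over as a bare string (hypothetical sketch)
    llm_prompt.generate_and_send_prompt(prompt, user_input)

    # After: input wrapped in a hash, matching the { text: ... } shape
    # exercised by the spec below; the exact key is taken from that spec
    llm_prompt.generate_and_send_prompt(prompt, { text: user_input })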
# frozen_string_literal: true

require_relative "../../../support/openai_completions_inference_stubs"

RSpec.describe DiscourseAi::AiHelper::LlmPrompt do
  let(:prompt) { CompletionPrompt.find_by(name: mode, provider: "openai") }

  describe "#generate_and_send_prompt" do
    context "when using the translate mode" do
      let(:mode) { OpenAiCompletionsInferenceStubs::TRANSLATE }

      before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }

      it "sends the prompt to ChatGPT and returns the response" do
        response =
          subject.generate_and_send_prompt(
            prompt,
            { text: OpenAiCompletionsInferenceStubs.spanish_text },
          )

        expect(response[:suggestions]).to contain_exactly(
          OpenAiCompletionsInferenceStubs.translated_response.strip,
        )
      end
    end

    context "when using the proofread mode" do
      let(:mode) { OpenAiCompletionsInferenceStubs::PROOFREAD }

      before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }

      it "sends the prompt to ChatGPT and returns the response" do
        response =
          subject.generate_and_send_prompt(
            prompt,
            { text: OpenAiCompletionsInferenceStubs.translated_response },
          )

        expect(response[:suggestions]).to contain_exactly(
          OpenAiCompletionsInferenceStubs.proofread_response.strip,
        )
      end
    end

    context "when generating titles" do
      let(:mode) { OpenAiCompletionsInferenceStubs::GENERATE_TITLES }

      before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }

      it "returns an array with each title" do
        expected =
          OpenAiCompletionsInferenceStubs
            .generated_titles
            .gsub("\"", "")
            .gsub(/\d./, "")
            .split("\n")
            .map(&:strip)

        response =
          subject.generate_and_send_prompt(
            prompt,
            { text: OpenAiCompletionsInferenceStubs.translated_response },
          )

        expect(response[:suggestions]).to contain_exactly(*expected)
      end
    end
  end
end
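These examples never hit the network: `OpenAiCompletionsInferenceStubs.stub_prompt` fakes the OpenAI completions endpoint for each mode before the request is made. A minimal sketch of that kind of stub using WebMock (the endpoint URL and response shape follow OpenAI's real chat completions format, but the helper name and payload here are assumptions, not the actual spec/support implementation):

    require "json"
    require "webmock/rspec"

    # Hypothetical illustration of the stubbing technique; not the actual
    # OpenAiCompletionsInferenceStubs internals.
    def stub_openai_completion(content)
      stub_request(:post, "https://api.openai.com/v1/chat/completions").to_return(
        status: 200,
        headers: { "Content-Type" => "application/json" },
        body: { choices: [{ message: { role: "assistant", content: content } }] }.to_json,
      )
    end

    # Usage inside a spec:
    #   before { stub_openai_completion("Hello world") }

Stubbing at the HTTP boundary keeps the specs deterministic and fast while still exercising the real request/response parsing code.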