Mirror of https://github.com/discourse/discourse-ai.git, synced 2025-10-31 06:28:48 +00:00
DEV: Remove the summarization feature

Instead, we'll register summarization implementations for OpenAI, Anthropic, and Discourse AI using the API defined in discourse/discourse#21813. Core and chat will implement features on top of these implementations instead of this plugin extending them.

* Register instances that contain the model, requiring fewer site settings
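For context, here is a minimal sketch of what that registration could look like from the plugin's entry point. The register_summarization_strategy hook name and the EntryPoint#inject_into shape are assumptions illustrating the API described in discourse/discourse#21813, not the plugin's actual registration code; the model names match the keys handled by the strategy's max_length table below.

# Hypothetical sketch, not the actual plugin code: assumes core exposes a
# registration hook along the lines of register_summarization_strategy
# (per the API in discourse/discourse#21813) and that ::Summarization::Base
# stores the model name passed to its constructor.
module DiscourseAi
  module Summarization
    class EntryPoint
      def inject_into(plugin)
        %w[
          bart-large-cnn-samsum
          flan-t5-base-samsum
          long-t5-tglobal-base-16384-book-summary
        ].each do |model|
          plugin.register_summarization_strategy(Strategies::DiscourseAi.new(model))
        end
      end
    end
  end
end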
		
			
				
	
	
		
# frozen_string_literal: true

module DiscourseAi
  module Summarization
    module Strategies
      class DiscourseAi < ::Summarization::Base
        def display_name
          "Discourse AI's #{model}"
        end

        def correctly_configured?
          SiteSetting.ai_summarization_discourse_service_api_endpoint.present? &&
            SiteSetting.ai_summarization_discourse_service_api_key.present?
        end

        def configuration_hint
          I18n.t(
            "discourse_ai.summarization.configuration_hint",
            count: 2,
            settings:
              "ai_summarization_discourse_service_api_endpoint, ai_summarization_discourse_service_api_key",
          )
        end

        def summarize(content_text)
          ::DiscourseAi::Inference::DiscourseClassifier.perform!(
            "#{SiteSetting.ai_summarization_discourse_service_api_endpoint}/api/v1/classify",
            model,
            prompt(content_text),
            SiteSetting.ai_summarization_discourse_service_api_key,
          ).dig(:summary_text)
        end

        def prompt(text)
          ::DiscourseAi::Tokenizer::BertTokenizer.truncate(text, max_length)
        end

        private

        def max_length
          lengths = {
            "bart-large-cnn-samsum" => 1024,
            "flan-t5-base-samsum" => 512,
            "long-t5-tglobal-base-16384-book-summary" => 16_384,
          }

          lengths[model]
        end
      end
    end
  end
end
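For illustration, a short usage sketch under the same assumptions: the base class keeps the model name passed to its constructor, and both ai_summarization_discourse_service_* site settings are already filled in. This is not part of the file above.

# Usage sketch, assuming ::Summarization::Base#initialize stores the model name.
# The model string should be one of the keys handled by #max_length above,
# otherwise the BERT truncation receives a nil length.
strategy = DiscourseAi::Summarization::Strategies::DiscourseAi.new("bart-large-cnn-samsum")

if strategy.correctly_configured?
  text = "A long topic or chat transcript to condense..."
  puts strategy.summarize(text) # the :summary_text returned by the classify endpoint
else
  puts strategy.configuration_hint
end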