# frozen_string_literal: true

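# Classifies the image uploads of a target (e.g. a post) as NSFW by sending
# them to an external inference service, then reduces the per-model scores to
# flag/no-flag verdicts using site-setting thresholds.
#
# A usage sketch (hypothetical caller; `flag!` stands in for whatever flagging
# step the caller performs):
#
#   classifier = DiscourseAi::NSFW::NSFWClassification.new
#   if classifier.can_classify?(post)
#     verdicts = classifier.get_verdicts(classifier.request(post))
#     flag!(post) if classifier.should_flag_based_on?(verdicts)
#   end
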
module DiscourseAi
  module NSFW
    class NSFWClassification
      def type
        :nsfw
      end

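      # A target is classifiable only if it has at least one supported image upload.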
      def can_classify?(target)
        content_of(target).present?
      end

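      # Reduces raw classification data, shaped as
      # { model_name => { upload_id => classification_hash } } (see #request),
      # to { model_name => true/false }: a model's verdict is true when any of
      # its classified uploads trips that model's threshold check. Neutral
      # scores and bookkeeping keys are excluded before the check.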
      def get_verdicts(classification_data)
        classification_data
          .map do |model_name, classifications|
            verdict =
              classifications.values.any? do |data|
                send("#{model_name}_verdict?", data.except(:neutral, :target_classified_type))
              end

            [model_name, verdict]
          end
          .to_h
      end

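      # Flagging is disabled entirely unless ai_nsfw_flag_automatically is set;
      # otherwise a single positive model verdict is enough to flag.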
      def should_flag_based_on?(verdicts)
        return false if !SiteSetting.ai_nsfw_flag_automatically

        verdicts.values.any?
      end

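      # Runs every enabled model against every supported image upload of the
      # target. Returns { model_name => { upload_id => classification } },
      # omitting uploads a model rejected with HTTP 415 (unsupported image).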
      def request(target_to_classify)
        uploads_to_classify = content_of(target_to_classify)

        available_models.reduce({}) do |memo, model|
          memo[model] = uploads_to_classify.reduce({}) do |upl_memo, upload|
            classification =
              evaluate_with_model(model, upload).merge(target_classified_type: upload.class.name)

            # 415 denotes that the image is not supported by the model, so we skip it
            upl_memo[upload.id] = classification if classification.dig(:status) != 415

            upl_memo
          end

          memo
        end
      end

      private

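      # Builds an absolute URL for the upload (the CDN URL may be relative) and
      # asks the inference service to classify it with the given model.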
      def evaluate_with_model(model, upload)
        upload_url = Discourse.store.cdn_url(upload.url)
        upload_url = "#{Discourse.base_url_no_prefix}#{upload_url}" if upload_url.starts_with?("/")

        DiscourseAi::Inference::DiscourseClassifier.perform!(
          "#{SiteSetting.ai_nsfw_inference_service_api_endpoint}/api/v1/classify",
          model,
          upload_url,
          SiteSetting.ai_nsfw_inference_service_api_key,
        )
      end

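      # Model names come from the pipe-delimited ai_nsfw_models site setting.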
      def available_models
        SiteSetting.ai_nsfw_models.split("|")
      end

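      # Only supported image uploads are candidates for classification.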
      def content_of(target_to_classify)
        target_to_classify.uploads.to_a.select { |u| FileHelper.is_supported_image?(u.url) }
      end

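      # opennsfw2 returns a single score; the verdict is positive once it
      # reaches ai_nsfw_flag_threshold_general.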
      def opennsfw2_verdict?(classification)
        classification.values.first.to_i >= SiteSetting.ai_nsfw_flag_threshold_general
      end

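      # nsfw_detector scores several categories; each score is checked against
      # its matching ai_nsfw_flag_threshold_<category> site setting.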
      def nsfw_detector_verdict?(classification)
        classification.any? do |key, value|
          value.to_i >= SiteSetting.send("ai_nsfw_flag_threshold_#{key}")
        end
      end
    end
  end
end