# frozen_string_literal: true

module DiscourseAi
  module Nsfw
    class Classification
      def type
        :nsfw
      end

      def can_classify?(target)
        content_of(target).present?
      end
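
      # A hedged usage sketch for #get_verdicts (the shape is assumed from how
      # #request below builds its result; the :nsfw field name is illustrative,
      # not taken from the inference API):
      #
      #   get_verdicts(
      #     "opennsfw2" => {
      #       42 => { nsfw: 93, target_classified_type: "Upload" },
      #     },
      #   )
      #   # => { "opennsfw2" => true } when 93 >= SiteSetting.ai_nsfw_flag_threshold_general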
      def get_verdicts(classification_data)
        classification_data
          .map do |model_name, classifications|
            # A model's verdict is true if any classified upload trips its threshold.
            verdict =
              classifications.values.any? do |data|
                send("#{model_name}_verdict?", data.except(:neutral, :target_classified_type))
              end

            [model_name, verdict]
          end
          .to_h
      end
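
      # `verdicts` is the { model_name => boolean } hash produced by #get_verdicts.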
      def should_flag_based_on?(verdicts)
        return false if !SiteSetting.ai_nsfw_flag_automatically

        verdicts.values.any?
      end
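
      # Classifies every supported image upload with every enabled model.
      # A hedged sketch of the returned structure (the inner score keys depend
      # on the inference service and are assumed here):
      #
      #   {
      #     "opennsfw2" => {
      #       upload_id => { nsfw: 93, target_classified_type: "Upload" },
      #     },
      #   }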
      def request(target_to_classify)
        uploads_to_classify = content_of(target_to_classify)

        available_models.reduce({}) do |memo, model|
          memo[model] = uploads_to_classify.reduce({}) do |upl_memo, upload|
            classification =
              evaluate_with_model(model, upload).merge(target_classified_type: upload.class.name)

            # 415 denotes that the image is not supported by the model, so we skip it
            upl_memo[upload.id] = classification if classification.dig(:status) != 415

            upl_memo
          end

          memo
        end
      end

      private
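
      # Sends the upload's public URL to the classification service. The exact
      # request/response contract of DiscourseAi::Inference::DiscourseClassifier
      # is defined elsewhere in the plugin; this class only assumes it returns
      # a hash of scores (plus an optional :status).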
      def evaluate_with_model(model, upload)
        upload_url = Discourse.store.cdn_url(upload.url)
        upload_url = "#{Discourse.base_url_no_prefix}#{upload_url}" if upload_url.starts_with?("/")

        DiscourseAi::Inference::DiscourseClassifier.new(
          "#{endpoint}/api/v1/classify",
          SiteSetting.ai_nsfw_inference_service_api_key,
          model,
        ).perform!(upload_url)
      end
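
      # With ai_nsfw_models set to "opennsfw2|nsfw_detector", this returns
      # ["opennsfw2", "nsfw_detector"], matching the *_verdict? helpers below.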
      def available_models
        SiteSetting.ai_nsfw_models.split("|")
      end

      def content_of(target_to_classify)
        target_to_classify.uploads.to_a.select { |u| FileHelper.is_supported_image?(u.url) }
      end

      def opennsfw2_verdict?(classification)
        classification.values.first.to_i >= SiteSetting.ai_nsfw_flag_threshold_general
      end
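
      # nsfw_detector returns one score per category; each category is checked
      # against its own ai_nsfw_flag_threshold_<category> site setting (category
      # names such as "porn" or "hentai" come from the model and are assumed here).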
      def nsfw_detector_verdict?(classification)
        classification.any? do |key, value|
          value.to_i >= SiteSetting.send("ai_nsfw_flag_threshold_#{key}")
        end
      end
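
      # Prefers a DNS SRV lookup when one is configured, falling back to the
      # statically configured endpoint.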
      def endpoint
        if SiteSetting.ai_nsfw_inference_service_api_endpoint_srv.present?
          service =
            DiscourseAi::Utils::DnsSrv.lookup(
              SiteSetting.ai_nsfw_inference_service_api_endpoint_srv,
            )
          "https://#{service.target}:#{service.port}"
        else
          SiteSetting.ai_nsfw_inference_service_api_endpoint
        end
      end
    end
  end
end