discourse-ai/lib/nsfw/classification.rb
Sam 6ddc17fd61
DEV: port directory structure to Zeitwerk (#319)
Previous to this change we relied on explicit loading for files in Discourse AI.

This had a few downsides:

- Busywork whenever you add a file (an extra require relative)
- We were not keeping to conventions internally ... some places were OpenAI others are OpenAi
- Autoloader did not work which led to lots of full application broken reloads when developing.

This moves all of DiscourseAI into a Zeitwerk compatible structure.

It also leaves some minimal amount of manual loading (automation - which is loading into an existing namespace that may or may not be there)

To avoid needing /lib/discourse_ai/... we mount a namespace thus we are able to keep /lib pointed at ::DiscourseAi

Various files were renamed to get around zeitwerk rules and minimize usage of custom inflections

Though we can get custom inflections to work it is not worth it, will require a Discourse core patch which means we create a hard dependency.
2023-11-29 15:17:46 +11:00

85 lines
2.3 KiB
Ruby

# frozen_string_literal: true
module DiscourseAi
  module Nsfw
    # Classifies a post's image uploads as NSFW by sending them to an
    # external inference service and comparing the returned scores against
    # the site's configured thresholds.
    class Classification
      # Identifier used by the classification pipeline to distinguish
      # NSFW results from other classification modules.
      def type
        :nsfw
      end

      # A target is classifiable only when it has at least one supported
      # image upload attached.
      def can_classify?(target)
        content_of(target).present?
      end

      # Collapses raw per-upload classification data into one boolean
      # verdict per model.
      #
      # @param classification_data [Hash] model_name => { upload_id => scores }
      # @return [Hash] model_name => true when any upload trips that model's threshold
      def get_verdicts(classification_data)
        classification_data.to_h do |model_name, classifications|
          verdict =
            classifications.values.any? do |data|
              # :neutral and :target_classified_type are bookkeeping keys,
              # not scores, so they are excluded from the threshold check.
              # NOTE: `send` (not public_send) is required here — the
              # per-model verdict methods below are private.
              send("#{model_name}_verdict?", data.except(:neutral, :target_classified_type))
            end

          [model_name, verdict]
        end
      end

      # @param verdicts [Hash] model_name => Boolean, as built by #get_verdicts
      # @return [Boolean] whether to flag, gated by the
      #   ai_nsfw_flag_automatically site setting.
      def should_flag_based_on?(verdicts)
        return false if !SiteSetting.ai_nsfw_flag_automatically

        verdicts.values.any?
      end

      # Runs every supported upload of the target through every enabled model.
      #
      # @return [Hash] model_name => { upload_id => classification hash }
      def request(target_to_classify)
        uploads_to_classify = content_of(target_to_classify)

        available_models.reduce({}) do |memo, model|
          memo[model] = uploads_to_classify.reduce({}) do |upl_memo, upload|
            classification =
              evaluate_with_model(model, upload).merge(target_classified_type: upload.class.name)

            # 415 denotes that the image is not supported by the model, so we skip it
            upl_memo[upload.id] = classification if classification[:status] != 415
            upl_memo
          end

          memo
        end
      end

      private

      # Calls the inference service for a single upload and returns the
      # parsed classification payload.
      def evaluate_with_model(model, upload)
        upload_url = Discourse.store.cdn_url(upload.url)
        # Relative CDN paths must be expanded to absolute URLs so the
        # external inference service can fetch the image.
        upload_url = "#{Discourse.base_url_no_prefix}#{upload_url}" if upload_url.starts_with?("/")

        DiscourseAi::Inference::DiscourseClassifier.perform!(
          "#{SiteSetting.ai_nsfw_inference_service_api_endpoint}/api/v1/classify",
          model,
          upload_url,
          SiteSetting.ai_nsfw_inference_service_api_key,
        )
      end

      # Enabled model names, pipe-delimited in the site setting.
      def available_models
        SiteSetting.ai_nsfw_models.split("|")
      end

      # Only supported image uploads are candidates for classification.
      def content_of(target_to_classify)
        target_to_classify.uploads.to_a.select { |u| FileHelper.is_supported_image?(u.url) }
      end

      # OpenNSFW2 emits a single probability score; compare it against the
      # general threshold setting.
      def opennsfw2_verdict?(classification)
        classification.values.first.to_i >= SiteSetting.ai_nsfw_flag_threshold_general
      end

      # nsfw_detector emits per-category scores; each category is compared
      # against its matching ai_nsfw_flag_threshold_<category> setting.
      # public_send is deliberate: settings are public, and the key comes
      # from an external service response.
      def nsfw_detector_verdict?(classification)
        classification.any? do |key, value|
          value.to_i >= SiteSetting.public_send("ai_nsfw_flag_threshold_#{key}")
        end
      end
    end
  end
end