FEATURE: Handle invalid media in NSFW module (#57)

* FEATURE: Handle invalid media in NSFW module

* fix lint
Rafael dos Santos Silva 2023-05-11 15:35:39 -03:00 committed by GitHub
parent 7e3cb0ea16
commit 66bf4c74c6
4 changed files with 29 additions and 9 deletions

@@ -35,9 +35,11 @@ module DiscourseAi
         available_models.reduce({}) do |memo, model|
           memo[model] = uploads_to_classify.reduce({}) do |upl_memo, upload|
-            upl_memo[upload.id] = evaluate_with_model(model, upload).merge(
-              target_classified_type: upload.class.name,
-            )
+            classification =
+              evaluate_with_model(model, upload).merge(target_classified_type: upload.class.name)
+
+            # 415 denotes that the image is not supported by the model, so we skip it
+            upl_memo[upload.id] = classification if classification.dig(:status) != 415
             upl_memo
           end
@@ -65,11 +67,7 @@ module DiscourseAi
         end

         def content_of(target_to_classify)
-          target_to_classify
-            .uploads
-            .where(extension: %w[png jpeg jpg PNG JPEG JPG])
-            .to_a
-            .select { |u| FileHelper.is_supported_image?(u.url) }
+          target_to_classify.uploads.to_a.select { |u| FileHelper.is_supported_image?(u.url) }
         end

         def opennsfw2_verdict?(clasification)
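
In effect, a classification hash that carries status: 415 is dropped from the per-model result. A minimal, self-contained sketch of that behaviour (the upload ids and the opennsfw2 score are made up; only the { error:, status: 415 } shape comes from the stubs further down):

    responses = {
      1 => { opennsfw2: 0.9, target_classified_type: "Upload" },                               # supported image
      2 => { error: "Unsupported image type", status: 415, target_classified_type: "Upload" }, # unsupported image
    }

    result =
      responses.reduce({}) do |memo, (upload_id, classification)|
        # 415 denotes that the image is not supported by the model, so we skip it
        memo[upload_id] = classification if classification[:status] != 415
        memo
      end

    result # => { 1 => { opennsfw2: 0.9, target_classified_type: "Upload" } }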

@@ -10,7 +10,7 @@ module ::DiscourseAi
       response = Faraday.post(endpoint, { model: model, content: content }.to_json, headers)

-      raise Net::HTTPBadResponse unless response.status == 200
+      raise Net::HTTPBadResponse if ![200, 415].include?(response.status)

       JSON.parse(response.body, symbolize_names: true)
     end

@@ -59,6 +59,16 @@ describe DiscourseAi::NSFW::NSFWClassification do
           assert_correctly_classified(classification, expected_classification)
         end
+
+        it "correctly skips unsupported uploads" do
+          NSFWInferenceStubs.positive(upload_1)
+          NSFWInferenceStubs.unsupported(upload_2)
+          expected_classification = build_expected_classification(upload_1)
+
+          classification = subject.request(post)
+
+          assert_correctly_classified(classification, expected_classification)
+        end
       end
     end
   end

@@ -46,5 +46,17 @@ class NSFWInferenceStubs
        .with(body: JSON.dump(model: "opennsfw2", content: upload_url(upload)))
        .to_return(status: 200, body: JSON.dump(negative_result("opennsfw2")))
     end
+
+    def unsupported(upload)
+      WebMock
+        .stub_request(:post, endpoint)
+        .with(body: JSON.dump(model: "nsfw_detector", content: upload_url(upload)))
+        .to_return(status: 415, body: JSON.dump({ error: "Unsupported image type", status: 415 }))
+
+      WebMock
+        .stub_request(:post, endpoint)
+        .with(body: JSON.dump(model: "opennsfw2", content: upload_url(upload)))
+        .to_return(status: 415, body: JSON.dump({ error: "Unsupported image type", status: 415 }))
+    end
   end
 end
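
For reference, the new stub pattern can also be exercised outside the spec suite; a small sketch with a made-up endpoint, assuming WebMock and Faraday are available:

    require "faraday"
    require "json"
    require "webmock"

    include WebMock::API
    WebMock.enable!

    endpoint = "https://inference.example.com/api/v1/classify" # placeholder

    # Stub any POST to the endpoint with the unsupported-media response shape used above.
    stub_request(:post, endpoint)
      .to_return(status: 415, body: JSON.dump({ error: "Unsupported image type", status: 415 }))

    response = Faraday.post(endpoint, "{}", "Content-Type" => "application/json")
    response.status # => 415, which the classifier now tolerates and skips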