FEATURE: Handle invalid media in NSFW module (#57)
* FEATURE: Handle invalid media in NSFW module
* fix lint
parent 7e3cb0ea16
commit 66bf4c74c6
@@ -35,9 +35,11 @@ module DiscourseAi
       available_models.reduce({}) do |memo, model|
         memo[model] = uploads_to_classify.reduce({}) do |upl_memo, upload|
-          upl_memo[upload.id] = evaluate_with_model(model, upload).merge(
-            target_classified_type: upload.class.name,
-          )
+          classification =
+            evaluate_with_model(model, upload).merge(target_classified_type: upload.class.name)
+
+          # 415 denotes that the image is not supported by the model, so we skip it
+          upl_memo[upload.id] = classification if classification.dig(:status) != 415

           upl_memo
         end
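Why this works: each model's responses are reduced into a hash keyed by upload id, and the new guard simply leaves out any entry whose merged hash reports status 415. A minimal sketch, with hypothetical response hashes standing in for evaluate_with_model (only the status: 415 shape is taken from the test stubs in this commit):

    # Hypothetical per-upload responses; only the 415 body mirrors the stubs below.
    responses = {
      1 => { verdict: true, target_classified_type: "Upload" },
      2 => { error: "Unsupported image type", status: 415, target_classified_type: "Upload" },
    }

    upl_memo =
      responses.reduce({}) do |memo, (upload_id, classification)|
        # Same guard as above: 415 means the model can't handle this media, so skip it.
        memo[upload_id] = classification if classification.dig(:status) != 415
        memo
      end

    upl_memo.keys # => [1]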
@@ -65,11 +67,7 @@ module DiscourseAi
       end

       def content_of(target_to_classify)
-        target_to_classify
-          .uploads
-          .where(extension: %w[png jpeg jpg PNG JPEG JPG])
-          .to_a
-          .select { |u| FileHelper.is_supported_image?(u.url) }
+        target_to_classify.uploads.to_a.select { |u| FileHelper.is_supported_image?(u.url) }
       end

       def opennsfw2_verdict?(clasification)
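The dropped .where(extension: ...) clause was redundant with the select: FileHelper.is_supported_image? in Discourse core already decides support from the file name, so filtering the loaded uploads in Ruby is enough. A rough illustration of the assumed helper behaviour (the paths are made up):

    # Assumed behaviour of the core helper: an extension-based check on the name/URL.
    FileHelper.is_supported_image?("/uploads/default/original/1X/cat.png")   # => true
    FileHelper.is_supported_image?("/uploads/default/original/1X/notes.pdf") # => false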
@@ -10,7 +10,7 @@ module ::DiscourseAi
       response = Faraday.post(endpoint, { model: model, content: content }.to_json, headers)

-      raise Net::HTTPBadResponse unless response.status == 200
+      raise Net::HTTPBadResponse if ![200, 415].include?(response.status)

       JSON.parse(response.body, symbolize_names: true)
     end
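The relaxed guard lets both 200 and 415 bodies flow into JSON.parse, so the NSFW module can inspect status: 415 and skip the upload, while every other status still surfaces as an error. A small stand-alone sketch of the same condition:

    require "net/http" # provides Net::HTTPBadResponse

    def check_status!(status)
      raise Net::HTTPBadResponse if ![200, 415].include?(status)
      status
    end

    check_status!(200) # => 200, body will be parsed
    check_status!(415) # => 415, body is parsed too so the caller can see the status
    begin
      check_status!(500)
    rescue Net::HTTPBadResponse
      # anything outside [200, 415] still raises, as before
    end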
@@ -59,6 +59,16 @@ describe DiscourseAi::NSFW::NSFWClassification do
        assert_correctly_classified(classification, expected_classification)
      end
+
+      it "correctly skips unsupported uploads" do
+        NSFWInferenceStubs.positive(upload_1)
+        NSFWInferenceStubs.unsupported(upload_2)
+        expected_classification = build_expected_classification(upload_1)
+
+        classification = subject.request(post)
+
+        assert_correctly_classified(classification, expected_classification)
+      end
    end
  end
end
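Because upload_2's stubbed responses carry status: 415, the classifier's guard drops it from every model's sub-hash, so the spec only builds expectations for upload_1. The asserted structure should look roughly like this (a hypothetical sketch; the real inner payloads come from build_expected_classification):

    # Hypothetical shape only; the nesting follows the reduce in NSFWClassification#request.
    expected = {
      "nsfw_detector" => { upload_1.id => { target_classified_type: "Upload" } },
      "opennsfw2" => { upload_1.id => { target_classified_type: "Upload" } },
    }
    # upload_2.id is absent under both models because its responses returned 415.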
@@ -46,5 +46,17 @@ class NSFWInferenceStubs
        .with(body: JSON.dump(model: "opennsfw2", content: upload_url(upload)))
        .to_return(status: 200, body: JSON.dump(negative_result("opennsfw2")))
    end
+
+    def unsupported(upload)
+      WebMock
+        .stub_request(:post, endpoint)
+        .with(body: JSON.dump(model: "nsfw_detector", content: upload_url(upload)))
+        .to_return(status: 415, body: JSON.dump({ error: "Unsupported image type", status: 415 }))
+
+      WebMock
+        .stub_request(:post, endpoint)
+        .with(body: JSON.dump(model: "opennsfw2", content: upload_url(upload)))
+        .to_return(status: 415, body: JSON.dump({ error: "Unsupported image type", status: 415 }))
+    end
  end
end
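These stubbed bodies round-trip through the inference client's JSON.parse(..., symbolize_names: true) shown above, which is what makes classification.dig(:status) work in the module. A quick check of that round trip:

    require "json"

    body = JSON.dump({ error: "Unsupported image type", status: 415 })
    parsed = JSON.parse(body, symbolize_names: true)
    parsed.dig(:status) # => 415, the value the classifier uses to skip the upload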