FIX: invalid urls should not break store.has_been_uploaded?

Breaking this method has wide ramifications, including breaking
search indexing.
This commit is contained in:
Sam Saffron 2020-06-25 15:00:15 +10:00
parent 88459e08c9
commit 689568c216
No known key found for this signature in database
GPG Key ID: B9606168D2FFD9F5
2 changed files with 8 additions and 4 deletions

View File

@ -89,7 +89,10 @@ module FileStore
begin
parsed_url = URI.parse(UrlHelper.encode(url))
rescue URI::InvalidURIError, URI::InvalidComponentError
rescue
# There are many exceptions possible here, including Addressable::URI:: exceptions
# and URI:: exceptions. A catch-all may seem wide, but it never makes sense to raise
# on an invalid url here
return false
end

View File

@ -283,9 +283,10 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
let(:s3_bucket) { resource.bucket(SiteSetting.s3_upload_bucket) }
let(:s3_helper) { store.s3_helper }
it "returns false for blank urls" do
url = ""
expect(store.has_been_uploaded?(url)).to eq(false)
it "returns false for blank urls and bad urls" do
expect(store.has_been_uploaded?("")).to eq(false)
expect(store.has_been_uploaded?("http://test@test.com:test/test.git")).to eq(false)
expect(store.has_been_uploaded?("http:///+test@test.com/test.git")).to eq(false)
end
it "returns true if the base hostname is the same for both urls" do